• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 //
2 // Copyright 2016 The ANGLE Project Authors. All rights reserved.
3 // Use of this source code is governed by a BSD-style license that can be
4 // found in the LICENSE file.
5 //
6 // vk_renderer.cpp:
7 //    Implements the class methods for Renderer.
8 //
9 
10 #include "libANGLE/renderer/vulkan/vk_renderer.h"
11 
12 // Placing this first seems to solve an intellisense bug.
13 #include "libANGLE/renderer/vulkan/vk_utils.h"
14 
15 #include <EGL/eglext.h>
16 
17 #include "common/debug.h"
18 #include "common/platform.h"
19 #include "common/system_utils.h"
20 #include "common/vulkan/libvulkan_loader.h"
21 #include "common/vulkan/vulkan_icd.h"
22 #include "gpu_info_util/SystemInfo.h"
23 #include "libANGLE/Context.h"
24 #include "libANGLE/Display.h"
25 #include "libANGLE/renderer/driver_utils.h"
26 #include "libANGLE/renderer/vulkan/CompilerVk.h"
27 #include "libANGLE/renderer/vulkan/ContextVk.h"
28 #include "libANGLE/renderer/vulkan/DisplayVk.h"
29 #include "libANGLE/renderer/vulkan/FramebufferVk.h"
30 #include "libANGLE/renderer/vulkan/ProgramVk.h"
31 #include "libANGLE/renderer/vulkan/SyncVk.h"
32 #include "libANGLE/renderer/vulkan/VertexArrayVk.h"
33 #include "libANGLE/renderer/vulkan/vk_caps_utils.h"
34 #include "libANGLE/renderer/vulkan/vk_format_utils.h"
35 #include "libANGLE/renderer/vulkan/vk_resource.h"
36 #include "libANGLE/trace.h"
37 #include "platform/PlatformMethods.h"
38 
39 // Consts
namespace
{
// Default location for pipeline-cache graph dumps.  On Android a world-writable tmp path is
// used; on other platforms the path is left empty.
#if defined(ANGLE_PLATFORM_ANDROID)
constexpr const char *kDefaultPipelineCacheGraphDumpPath = "/data/local/tmp/angle_dumps/";
#else
constexpr const char *kDefaultPipelineCacheGraphDumpPath = "";
#endif  // ANGLE_PLATFORM_ANDROID

// Sentinel (all bits set) marking format feature flags as invalid / not yet queried.
constexpr VkFormatFeatureFlags kInvalidFormatFeatureFlags = static_cast<VkFormatFeatureFlags>(-1);

// Compile-time mirrors of build defines, so the rest of the code can test plain bools.
#if defined(ANGLE_EXPOSE_NON_CONFORMANT_EXTENSIONS_AND_VERSIONS)
constexpr bool kExposeNonConformantExtensionsAndVersions = true;
#else
constexpr bool kExposeNonConformantExtensionsAndVersions = false;
#endif

#if defined(ANGLE_ENABLE_CRC_FOR_PIPELINE_CACHE)
constexpr bool kEnableCRCForPipelineCache = true;
#else
constexpr bool kEnableCRCForPipelineCache = false;
#endif
}  // anonymous namespace
62 
63 namespace rx
64 {
65 namespace vk
66 {
67 namespace
68 {
// Lower bound for the per-context default uniform buffer size.
constexpr uint32_t kMinDefaultUniformBufferSize = 16 * 1024u;
// This size is picked based on experience. Majority of devices support 64K
// maxUniformBufferSize. Since this is a per context buffer, a bigger buffer size reduces the
// number of descriptor set allocations, so we picked the maxUniformBufferSize that most
// devices support. It may need further tuning based on specific device needs and the balance
// between performance and memory usage.
constexpr uint32_t kPreferredDefaultUniformBufferSize = 64 * 1024u;

// Maximum size to use VMA image suballocation. Any allocation greater than or equal to this
// value will use a dedicated VkDeviceMemory.
constexpr size_t kImageSizeThresholdForDedicatedMemoryAllocation = 4 * 1024 * 1024;

// Pipeline cache header version. It should be incremented any time there is an update to the cache
// header or data structure.
constexpr uint32_t kPipelineCacheVersion = 2;

// Update the pipeline cache every this many swaps.
constexpr uint32_t kPipelineCacheVkUpdatePeriod = 60;
// The minimum version of Vulkan that ANGLE requires.  If an instance or device below this version
// is encountered, initialization will fail.
constexpr uint32_t kMinimumVulkanAPIVersion = VK_API_VERSION_1_1;
// Per the Vulkan specification, ANGLE must indicate the highest version of Vulkan functionality
// that it uses.  The Vulkan validation layers will issue messages for any core functionality that
// requires a higher version.
//
// ANGLE specifically limits its core version to Vulkan 1.1 and relies on availability of
// extensions.  While implementations are not required to expose an extension that is promoted to
// later versions, they always do so in practice.  Avoiding later core versions helps keep the
// initialization logic simpler.
constexpr uint32_t kPreferredVulkanAPIVersion = VK_API_VERSION_1_1;
99 
IsVulkan11(uint32_t apiVersion)100 bool IsVulkan11(uint32_t apiVersion)
101 {
102     return apiVersion >= VK_API_VERSION_1_1;
103 }
104 
IsRADV(uint32_t vendorId,uint32_t driverId,const char * deviceName)105 bool IsRADV(uint32_t vendorId, uint32_t driverId, const char *deviceName)
106 {
107     // Check against RADV driver id first.
108     if (driverId == VK_DRIVER_ID_MESA_RADV)
109     {
110         return true;
111     }
112 
113     // Otherwise, look for RADV in the device name.  This works for both RADV
114     // and Venus-over-RADV.
115     return IsAMD(vendorId) && strstr(deviceName, "RADV") != nullptr;
116 }
117 
IsQualcommOpenSource(uint32_t vendorId,uint32_t driverId,const char * deviceName)118 bool IsQualcommOpenSource(uint32_t vendorId, uint32_t driverId, const char *deviceName)
119 {
120     if (!IsQualcomm(vendorId))
121     {
122         return false;
123     }
124 
125     // Where driver id is available, distinguish by driver id:
126     if (driverId != 0)
127     {
128         return driverId != VK_DRIVER_ID_QUALCOMM_PROPRIETARY;
129     }
130 
131     // Otherwise, look for Venus or Turnip in the device name.
132     return strstr(deviceName, "Venus") != nullptr || strstr(deviceName, "Turnip") != nullptr;
133 }
134 
IsXclipse()135 bool IsXclipse()
136 {
137     if (!IsAndroid())
138     {
139         return false;
140     }
141 
142     std::string modelName;
143     if (!angle::android::GetSystemProperty(angle::android::kModelSystemPropertyName, &modelName))
144     {
145         return 0;
146     }
147 
148     // Improve this when more Xclipse devices are available
149     return strstr(modelName.c_str(), "SM-S901B") != nullptr;
150 }
151 
// Lexicographic less-than comparator for C strings; used for sorting/searching extension lists.
bool StrLess(const char *a, const char *b)
{
    const int order = strcmp(a, b);
    return order < 0;
}
156 
ExtensionFound(const char * needle,const vk::ExtensionNameList & haystack)157 bool ExtensionFound(const char *needle, const vk::ExtensionNameList &haystack)
158 {
159     // NOTE: The list must be sorted.
160     return std::binary_search(haystack.begin(), haystack.end(), needle, StrLess);
161 }
162 
VerifyExtensionsPresent(const vk::ExtensionNameList & haystack,const vk::ExtensionNameList & needles)163 VkResult VerifyExtensionsPresent(const vk::ExtensionNameList &haystack,
164                                  const vk::ExtensionNameList &needles)
165 {
166     // NOTE: The lists must be sorted.
167     if (std::includes(haystack.begin(), haystack.end(), needles.begin(), needles.end(), StrLess))
168     {
169         return VK_SUCCESS;
170     }
171     for (const char *needle : needles)
172     {
173         if (!ExtensionFound(needle, haystack))
174         {
175             ERR() << "Extension not supported: " << needle;
176         }
177     }
178     return VK_ERROR_EXTENSION_NOT_PRESENT;
179 }
180 
// Array of Validation error/warning messages that will be ignored, should include bugID.
// Entries are matched by substring search; each message should appear exactly once (duplicate
// entries previously listed under http://anglebug.com/8242 have been removed).
constexpr const char *kSkippedMessages[] = {
    // http://anglebug.com/8401
    "Undefined-Value-ShaderOutputNotConsumed",
    "Undefined-Value-ShaderInputNotProduced",
    // http://anglebug.com/5304
    "VUID-vkCmdDraw-magFilter-04553",
    "VUID-vkCmdDrawIndexed-magFilter-04553",
    // http://anglebug.com/5912
    "VUID-VkImageViewCreateInfo-pNext-01585",
    // http://anglebug.com/6514
    "vkEnumeratePhysicalDevices: One or more layers modified physical devices",
    // When using Vulkan secondary command buffers, the command buffer is begun with the current
    // framebuffer specified in pInheritanceInfo::framebuffer.  If the framebuffer is multisampled
    // and is resolved, an optimization would change the framebuffer to add the resolve target and
    // use a subpass resolve operation instead.  The following error complains that the framebuffer
    // used to start the render pass and the one specified in pInheritanceInfo::framebuffer must be
    // equal, which is not true in that case.  In practice, this is benign, as the part of the
    // framebuffer that's accessed by the command buffer is identically laid out.
    // http://anglebug.com/6811
    "VUID-vkCmdExecuteCommands-pCommandBuffers-00099",
    // http://anglebug.com/7325
    "VUID-vkCmdBindVertexBuffers2-pStrides-06209",
    // http://anglebug.com/7729
    "VUID-vkDestroySemaphore-semaphore-01137",
    "VUID-vkDestroySemaphore-semaphore-05149",
    // https://issuetracker.google.com/303219657
    "VUID-VkGraphicsPipelineCreateInfo-pStages-00738",
    // http://anglebug.com/7861
    "VUID-vkCmdDraw-None-06887",
    "VUID-vkCmdDraw-None-06886",
    "VUID-vkCmdDrawIndexed-None-06887",
    // http://anglebug.com/8394
    // Also covers the application-caused feedback loops under https://anglebug.com/8242.
    "VUID-vkCmdDraw-None-09000",
    "VUID-vkCmdDrawIndexed-None-09002",
    // http://anglebug.com/7865
    "VUID-VkDescriptorImageInfo-imageView-06711",
    "VUID-VkDescriptorImageInfo-descriptorType-06713",
    // http://crbug.com/1412096
    "VUID-VkImageCreateInfo-pNext-00990",
    // http://anglebug.com/8119
    "VUID-VkGraphicsPipelineCreateInfo-Input-07904",
    "VUID-VkGraphicsPipelineCreateInfo-Input-07905",
    "VUID-vkCmdDrawIndexed-None-07835",
    "VUID-VkGraphicsPipelineCreateInfo-Input-08733",
    "VUID-vkCmdDraw-Input-08734",
    // https://anglebug.com/8128#c3
    "VUID-VkBufferViewCreateInfo-format-08779",
    // https://anglebug.com/8203
    "VUID-VkVertexInputBindingDivisorDescriptionEXT-divisor-01870",
    // https://anglebug.com/8454
    "VUID-VkVertexInputBindingDivisorDescriptionKHR-divisor-01870",
    // https://anglebug.com/8237
    "VUID-VkGraphicsPipelineCreateInfo-topology-08773",
    // https://anglebug.com/7291
    "VUID-vkCmdBlitImage-srcImage-00240",
    // https://anglebug.com/8242
    // VVL bug: https://github.com/KhronosGroup/Vulkan-ValidationLayers/issues/7858
    "VUID-vkCmdDraw-None-08608",
    "VUID-vkCmdDrawIndexed-None-08608",
    // https://anglebug.com/8242
    // Invalid feedback loop caused by the application.  The vkCmdDraw variants of 09000/09002 are
    // already listed above under http://anglebug.com/8394.
    "VUID-vkCmdDrawIndexed-None-09000",
    "VUID-vkCmdDraw-None-09002",
    "VUID-vkCmdDraw-None-09003",
    "VUID-vkCmdDrawIndexed-None-09003",
    // https://anglebug.com/8334
    "VUID-VkDescriptorImageInfo-imageView-07796",
    // https://issuetracker.google.com/303441816
    "VUID-VkRenderPassBeginInfo-renderPass-00904",
    // http://anglebug.com/8466
    "VUID-VkMemoryAllocateInfo-allocationSize-01742",
    "VUID-VkMemoryDedicatedAllocateInfo-image-01878",
    // http://anglebug.com/8468
    "VUID-vkCmdDraw-pNext-09461",
    // http://anglebug.com/8470
    "VUID-VkImportMemoryFdInfoKHR-handleType-00667",
    // http://anglebug.com/8482
    "VUID-VkImportMemoryWin32HandleInfoKHR-handleType-00658",
    // https://anglebug.com/8497
    "VUID-vkCmdEndDebugUtilsLabelEXT-commandBuffer-01912",
    // https://anglebug.com/8522
    "VUID-VkPipelineVertexInputStateCreateInfo-pNext-pNext",
    // https://issuetracker.google.com/319228278
    "VUID-vkCmdDrawIndexed-format-07753",
    "VUID-vkCmdDraw-format-07753",
    "Undefined-Value-ShaderFragmentOutputMismatch",
    // https://issuetracker.google.com/336652255
    "UNASSIGNED-CoreValidation-DrawState-InvalidImageLayout",
    // https://issuetracker.google.com/336847261
    "VUID-VkImageCreateInfo-pNext-02397",
    "VUID-vkCmdDraw-None-06550",
    // https://anglebug.com/345304850
    "WARNING-Shader-OutputNotConsumed",
};
278 
// Validation messages that should be ignored only when VK_EXT_primitive_topology_list_restart is
// not present.
constexpr const char *kNoListRestartSkippedMessages[] = {
    // http://anglebug.com/3832
    "VUID-VkPipelineInputAssemblyStateCreateInfo-topology-06252",
};

// VVL appears to have a bug tracking stageMask on VkEvent with secondary command buffer.
// https://github.com/KhronosGroup/Vulkan-ValidationLayers/issues/7849
constexpr const char *kSkippedMessagesWithVulkanSecondaryCommandBuffer[] = {
    "VUID-vkCmdWaitEvents-srcStageMask-parameter",
};
291 
// Some syncval errors are resolved in the presence of the NONE load or store render pass ops.  For
// those, ANGLE makes no further attempt to resolve them and expects vendor support for the
// extensions instead.  The list of skipped messages is split based on this support.
constexpr vk::SkippedSyncvalMessage kSkippedSyncvalMessages[] = {
    // http://anglebug.com/6416
    // http://anglebug.com/6421
    {
        "SYNC-HAZARD-WRITE-AFTER-WRITE",
        "Access info (usage: SYNC_IMAGE_LAYOUT_TRANSITION, prior_usage: "
        "SYNC_IMAGE_LAYOUT_TRANSITION, "
        "write_barriers: 0, command: vkCmdEndRenderPass",
    },
    // These errors are caused by feedback loop tests that don't produce correct Vulkan to begin
    // with.
    // http://anglebug.com/6417
    // http://anglebug.com/7070
    //
    // Occasionally, this is due to VVL's lack of support for some extensions.  For example,
    // syncval doesn't properly account for VK_EXT_fragment_shader_interlock, which gives
    // synchronization guarantees without the need for an image barrier.
    // https://github.com/KhronosGroup/Vulkan-ValidationLayers/issues/4387
    {
        "SYNC-HAZARD-READ-AFTER-WRITE",
        "imageLayout: VK_IMAGE_LAYOUT_GENERAL",
        "usage: SYNC_FRAGMENT_SHADER_SHADER_",
    },
    // http://anglebug.com/6551
    {
        "SYNC-HAZARD-WRITE-AFTER-WRITE",
        "Access info (usage: SYNC_IMAGE_LAYOUT_TRANSITION, prior_usage: "
        "SYNC_COLOR_ATTACHMENT_OUTPUT_COLOR_ATTACHMENT_WRITE, write_barriers: "
        "SYNC_EARLY_FRAGMENT_TESTS_DEPTH_STENCIL_ATTACHMENT_READ|SYNC_EARLY_FRAGMENT_TESTS_DEPTH_"
        "STENCIL_ATTACHMENT_WRITE|SYNC_LATE_FRAGMENT_TESTS_DEPTH_STENCIL_ATTACHMENT_READ|SYNC_LATE_"
        "FRAGMENT_TESTS_DEPTH_STENCIL_ATTACHMENT_WRITE|SYNC_COLOR_ATTACHMENT_OUTPUT_COLOR_"
        "ATTACHMENT_"
        "READ|SYNC_COLOR_ATTACHMENT_OUTPUT_COLOR_ATTACHMENT_WRITE, command: vkCmdEndRenderPass",
    },
    {
        "SYNC-HAZARD-WRITE-AFTER-WRITE",
        "Access info (usage: SYNC_IMAGE_LAYOUT_TRANSITION, prior_usage: "
        "SYNC_COLOR_ATTACHMENT_OUTPUT_COLOR_ATTACHMENT_WRITE, write_barriers: "
        "SYNC_COLOR_ATTACHMENT_OUTPUT_COLOR_ATTACHMENT_READ|SYNC_COLOR_ATTACHMENT_OUTPUT_COLOR_"
        "ATTACHMENT_WRITE, command: vkCmdEndRenderPass",
    },
    // From: TraceTest.manhattan_31 with SwiftShader and
    // VulkanPerformanceCounterTest.NewTextureDoesNotBreakRenderPass for both depth and stencil
    // aspect. http://anglebug.com/6701.
    // Additionally hit in the asphalt_9 trace
    // https://issuetracker.google.com/316337308
    {
        "SYNC-HAZARD-WRITE-AFTER-WRITE",
        "Hazard WRITE_AFTER_WRITE in subpass ",
        "during load with loadOp VK_ATTACHMENT_LOAD_OP_DONT_CARE. Access info (usage: "
        "SYNC_EARLY_FRAGMENT_TESTS_DEPTH_STENCIL_ATTACHMENT_WRITE, prior_usage: "
        "SYNC_IMAGE_LAYOUT_TRANSITION",
    },
    // From various tests. The validation layer does not calculate the exact vertexCounts that's
    // being accessed. http://anglebug.com/6725
    {
        "SYNC-HAZARD-READ-AFTER-WRITE",
        "Hazard READ_AFTER_WRITE for vertex",
        "usage: SYNC_VERTEX_ATTRIBUTE_INPUT_VERTEX_ATTRIBUTE_READ",
    },
    {
        "SYNC-HAZARD-READ-AFTER-WRITE",
        "Hazard READ_AFTER_WRITE for index",
        "usage: SYNC_INDEX_INPUT_INDEX_READ",
    },
    {
        "SYNC-HAZARD-WRITE-AFTER-READ",
        "Hazard WRITE_AFTER_READ for",
        "Access info (usage: SYNC_VERTEX_SHADER_SHADER_STORAGE_WRITE, prior_usage: "
        "SYNC_VERTEX_ATTRIBUTE_INPUT_VERTEX_ATTRIBUTE_READ",
    },
    {
        "SYNC-HAZARD-WRITE-AFTER-READ",
        "Hazard WRITE_AFTER_READ for dstBuffer VkBuffer",
        "Access info (usage: SYNC_COPY_TRANSFER_WRITE, prior_usage: "
        "SYNC_VERTEX_ATTRIBUTE_INPUT_VERTEX_ATTRIBUTE_READ",
    },
    {
        "SYNC-HAZARD-WRITE-AFTER-READ",
        "Hazard WRITE_AFTER_READ for VkBuffer",
        "Access info (usage: SYNC_COMPUTE_SHADER_SHADER_STORAGE_WRITE, prior_usage: "
        "SYNC_VERTEX_ATTRIBUTE_INPUT_VERTEX_ATTRIBUTE_READ",
    },
    // From: MultisampledRenderToTextureES3Test.TransformFeedbackTest. http://anglebug.com/6725
    {
        "SYNC-HAZARD-WRITE-AFTER-WRITE",
        "vkCmdBeginRenderPass: Hazard WRITE_AFTER_WRITE in subpass",
        "write_barriers: "
        "SYNC_TRANSFORM_FEEDBACK_EXT_TRANSFORM_FEEDBACK_COUNTER_READ_EXT|SYNC_TRANSFORM_FEEDBACK_"
        "EXT_"
        "TRANSFORM_FEEDBACK_COUNTER_WRITE_EXT",
    },
    // http://anglebug.com/8054 (VkNonDispatchableHandle on x86 bots)
    {
        "SYNC-HAZARD-READ-AFTER-WRITE",
        "Hazard READ_AFTER_WRITE for VkBuffer",
        "usage: SYNC_VERTEX_SHADER_SHADER_STORAGE_READ",
    },
    {
        "SYNC-HAZARD-READ-AFTER-WRITE",
        "Hazard READ_AFTER_WRITE for VkNonDispatchableHandle",
        "usage: SYNC_VERTEX_SHADER_SHADER_STORAGE_READ",
    },
    // From: TraceTest.manhattan_31 with SwiftShader. These failures appear related to
    // dynamic uniform buffers. The failures are gone if I force mUniformBufferDescriptorType to
    // VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER. My guess is that syncval is not doing a fine grain enough
    // range tracking with dynamic uniform buffers. http://anglebug.com/6725
    {
        "SYNC-HAZARD-WRITE-AFTER-READ",
        "usage: SYNC_VERTEX_SHADER_UNIFORM_READ",
    },
    {
        "SYNC-HAZARD-READ-AFTER-WRITE",
        "usage: SYNC_VERTEX_SHADER_UNIFORM_READ",
    },
    {
        "SYNC-HAZARD-WRITE-AFTER-READ",
        "type: VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC",
    },
    {
        "SYNC-HAZARD-READ-AFTER-WRITE",
        "type: VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC",
    },
    // Coherent framebuffer fetch is enabled on some platforms that are known a priori to have the
    // needed behavior, even though this is not specified in the Vulkan spec.  These generate
    // syncval errors that are benign on those platforms.
    // http://anglebug.com/6870
    // From: TraceTest.dead_by_daylight
    // From: TraceTest.genshin_impact
    {"SYNC-HAZARD-READ-AFTER-WRITE",
     "vkCmdBeginRenderPass():  Hazard READ_AFTER_WRITE in subpass 0 for attachment ",
     "aspect color during load with loadOp VK_ATTACHMENT_LOAD_OP_LOAD. Access info (usage: "
     "SYNC_COLOR_ATTACHMENT_OUTPUT_COLOR_ATTACHMENT_READ, prior_usage: "
     "SYNC_IMAGE_LAYOUT_TRANSITION, write_barriers: 0, command: vkCmdEndRenderPass",
     true},
    {"SYNC-HAZARD-WRITE-AFTER-WRITE",
     "vkCmdBeginRenderPass():  Hazard WRITE_AFTER_WRITE in subpass 0 for attachment ",
     "image layout transition (old_layout: VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, new_layout: "
     "VK_IMAGE_LAYOUT_GENERAL). Access info (usage: SYNC_IMAGE_LAYOUT_TRANSITION, prior_usage: "
     "SYNC_COLOR_ATTACHMENT_OUTPUT_COLOR_ATTACHMENT_WRITE, write_barriers:",
     true},
    // From: TraceTest.special_forces_group_2 http://anglebug.com/5592
    {
        "SYNC-HAZARD-WRITE-AFTER-READ",
        "Access info (usage: SYNC_IMAGE_LAYOUT_TRANSITION, prior_usage: "
        "SYNC_FRAGMENT_SHADER_SHADER_",
    },
    // http://anglebug.com/7031
    {"SYNC-HAZARD-READ-AFTER-WRITE",
     "type: VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, imageLayout: "
     "VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, binding #0, index 0. Access info (usage: "
     "SYNC_COMPUTE_SHADER_SHADER_",
     "", false},
    // http://anglebug.com/7456
    {
        "SYNC-HAZARD-READ-AFTER-WRITE",
        "type: VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, "
        "imageLayout: VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL",
        "Access info (usage: SYNC_FRAGMENT_SHADER_SHADER_",
    },
    // From: TraceTest.life_is_strange http://anglebug.com/7711
    {"SYNC-HAZARD-WRITE-AFTER-READ",
     "vkCmdEndRenderPass():  Hazard WRITE_AFTER_READ in subpass 0 for attachment 1 "
     "depth aspect during store with storeOp VK_ATTACHMENT_STORE_OP_DONT_CARE. "
     "Access info (usage: SYNC_LATE_FRAGMENT_TESTS_DEPTH_STENCIL_ATTACHMENT_WRITE, "
     "prior_usage: SYNC_FRAGMENT_SHADER_SHADER_"},
    // From: TraceTest.life_is_strange http://anglebug.com/7711
    {"SYNC-HAZARD-READ-AFTER-WRITE",
     "type: VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, "
     "imageLayout: VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL",
     "usage: SYNC_FRAGMENT_SHADER_SHADER_"},
    // From: TraceTest.diablo_immortal http://anglebug.com/7837
    {"SYNC-HAZARD-WRITE-AFTER-WRITE", "Hazard WRITE_AFTER_WRITE for VkImageView ",
     "Subpass #0, and pColorAttachments #0. Access info (usage: "
     "SYNC_COLOR_ATTACHMENT_OUTPUT_COLOR_ATTACHMENT_WRITE, prior_usage: "
     "SYNC_IMAGE_LAYOUT_TRANSITION, write_barriers: 0, command: vkCmdEndRenderPass"},
    // From: TraceTest.diablo_immortal http://anglebug.com/7837
    {"SYNC-HAZARD-WRITE-AFTER-READ",
     "load with loadOp VK_ATTACHMENT_LOAD_OP_DONT_CARE. Access info (usage: "
     "SYNC_EARLY_FRAGMENT_TESTS_DEPTH_STENCIL_ATTACHMENT_WRITE, prior_usage: "
     "SYNC_FRAGMENT_SHADER_SHADER_"},
    // From: TraceTest.catalyst_black http://anglebug.com/7924
    {"SYNC-HAZARD-WRITE-AFTER-READ",
     "store with storeOp VK_ATTACHMENT_STORE_OP_STORE. Access info (usage: "
     "SYNC_LATE_FRAGMENT_TESTS_DEPTH_STENCIL_ATTACHMENT_WRITE, prior_usage: "
     "SYNC_FRAGMENT_SHADER_SHADER_"},
};
482 
// Messages that shouldn't be generated if storeOp=NONE is supported, otherwise they are expected.
constexpr vk::SkippedSyncvalMessage kSkippedSyncvalMessagesWithoutStoreOpNone[] = {
    // These errors are generated when simultaneously using a read-only depth/stencil attachment as
    // a sampler.  This is valid Vulkan.
    //
    // When storeOp=NONE is not present, ANGLE uses storeOp=STORE, but considers the image read-only
    // and produces a hazard.  ANGLE relies on storeOp=NONE and so this is not expected to be worked
    // around.
    //
    // With storeOp=NONE, there is another bug where a depth/stencil attachment may use storeOp=NONE
    // for depth while storeOp=DONT_CARE for stencil, and the latter causes a synchronization error
    // (similarly to the previous case as DONT_CARE is also a write operation).
    // http://anglebug.com/5962
    {
        "SYNC-HAZARD-WRITE-AFTER-READ",
        "depth aspect during store with storeOp VK_ATTACHMENT_STORE_OP_STORE. Access info (usage: "
        "SYNC_LATE_FRAGMENT_TESTS_DEPTH_STENCIL_ATTACHMENT_WRITE",
        "usage: SYNC_FRAGMENT_SHADER_SHADER_",
    },
    {
        "SYNC-HAZARD-WRITE-AFTER-READ",
        "stencil aspect during store with stencilStoreOp VK_ATTACHMENT_STORE_OP_STORE. Access info "
        "(usage: SYNC_LATE_FRAGMENT_TESTS_DEPTH_STENCIL_ATTACHMENT_WRITE",
        "usage: SYNC_FRAGMENT_SHADER_SHADER_",
    },
    {
        "SYNC-HAZARD-READ-AFTER-WRITE",
        "imageLayout: VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL",
        "usage: SYNC_FRAGMENT_SHADER_SHADER_",
    },
    // From: TraceTest.antutu_refinery http://anglebug.com/6663
    {
        "SYNC-HAZARD-READ-AFTER-WRITE",
        "imageLayout: VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL",
        "usage: SYNC_COMPUTE_SHADER_SHADER_SAMPLED_READ",
    },
};
520 
521 // Messages that shouldn't be generated if both loadOp=NONE and storeOp=NONE are supported,
522 // otherwise they are expected.
523 constexpr vk::SkippedSyncvalMessage kSkippedSyncvalMessagesWithoutLoadStoreOpNone[] = {
524     // This error is generated for multiple reasons:
525     //
526     // - http://anglebug.com/6411
527     // - http://anglebug.com/5371: This is resolved with storeOp=NONE
528     {
529         "SYNC-HAZARD-WRITE-AFTER-WRITE",
530         "Access info (usage: SYNC_IMAGE_LAYOUT_TRANSITION, prior_usage: "
531         "SYNC_LATE_FRAGMENT_TESTS_DEPTH_STENCIL_ATTACHMENT_WRITE, write_barriers: 0, command: "
532         "vkCmdEndRenderPass",
533     },
534     // http://anglebug.com/6411
535     // http://anglebug.com/6584
536     {
537         "SYNC-HAZARD-WRITE-AFTER-WRITE",
538         "aspect depth during load with loadOp VK_ATTACHMENT_LOAD_OP_DONT_CARE. Access info (usage: "
539         "SYNC_EARLY_FRAGMENT_TESTS_DEPTH_STENCIL_ATTACHMENT_WRITE, prior_usage: "
540         "SYNC_IMAGE_LAYOUT_TRANSITION",
541     },
542     {
543         "SYNC-HAZARD-WRITE-AFTER-WRITE",
544         "aspect stencil during load with loadOp VK_ATTACHMENT_LOAD_OP_DONT_CARE. Access info "
545         "(usage: "
546         "SYNC_EARLY_FRAGMENT_TESTS_DEPTH_STENCIL_ATTACHMENT_WRITE",
547     },
548     // http://anglebug.com/5962
549     {
550         "SYNC-HAZARD-WRITE-AFTER-WRITE",
551         "aspect stencil during load with loadOp VK_ATTACHMENT_LOAD_OP_DONT_CARE. Access info "
552         "(usage: "
553         "SYNC_EARLY_FRAGMENT_TESTS_DEPTH_STENCIL_ATTACHMENT_WRITE, prior_usage: "
554         "SYNC_IMAGE_LAYOUT_TRANSITION",
555     },
556     {
557         "SYNC-HAZARD-WRITE-AFTER-WRITE",
558         "aspect stencil during load with loadOp VK_ATTACHMENT_LOAD_OP_DONT_CARE. Access info "
559         "(usage: "
560         "SYNC_EARLY_FRAGMENT_TESTS_DEPTH_STENCIL_ATTACHMENT_WRITE, prior_usage: "
561         "SYNC_IMAGE_LAYOUT_TRANSITION",
562     },
563 };
564 
// Messages that are only generated with MSRTT emulation.  Some of these are syncval bugs (discussed
// in https://gitlab.khronos.org/vulkan/vulkan/-/issues/3840)
constexpr vk::SkippedSyncvalMessage kSkippedSyncvalMessagesWithMSRTTEmulation[] = {
    // False positive: https://gitlab.khronos.org/vulkan/vulkan/-/issues/3840
    {
        "SYNC-HAZARD-READ-AFTER-WRITE",
        "during depth/stencil resolve read",
        "SYNC_COLOR_ATTACHMENT_OUTPUT_COLOR_ATTACHMENT_READ",
    },
    // Unknown whether this is an ANGLE or a syncval bug.
    {
        "SYNC-HAZARD-WRITE-AFTER-WRITE",
        "vkCmdBeginRenderPass():  Hazard WRITE_AFTER_WRITE in subpass 0 for attachment",
        "image layout transition (old_layout: VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, "
        "new_layout: VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL). Access info (usage: "
        "SYNC_IMAGE_LAYOUT_TRANSITION",
    },
};
583 
// Outcome of filtering a validation/debug message: either silently drop it or report it.
enum class DebugMessageReport
{
    Ignore,
    Print,
};
589 
// Returns whether |message| contains any entry of |skippedList| as a substring.
bool IsMessageInSkipList(const char *message,
                         const char *const skippedList[],
                         size_t skippedListSize)
{
    const char *const *listEnd = skippedList + skippedListSize;
    return std::any_of(skippedList, listEnd, [message](const char *skipped) {
        return strstr(message, skipped) != nullptr;
    });
}
604 
605 // Suppress validation errors that are known.  Returns DebugMessageReport::Ignore in that case.
ShouldReportDebugMessage(Renderer * renderer,const char * messageId,const char * message)606 DebugMessageReport ShouldReportDebugMessage(Renderer *renderer,
607                                             const char *messageId,
608                                             const char *message)
609 {
610     if (message == nullptr || messageId == nullptr)
611     {
612         return DebugMessageReport::Print;
613     }
614 
615     // Check with non-syncval messages:
616     const std::vector<const char *> &skippedMessages = renderer->getSkippedValidationMessages();
617     if (IsMessageInSkipList(message, skippedMessages.data(), skippedMessages.size()))
618     {
619         return DebugMessageReport::Ignore;
620     }
621 
622     // Then check with syncval messages:
623     const bool isFramebufferFetchUsed = renderer->isFramebufferFetchUsed();
624 
625     for (const vk::SkippedSyncvalMessage &msg : renderer->getSkippedSyncvalMessages())
626     {
627         if (strstr(messageId, msg.messageId) == nullptr ||
628             strstr(message, msg.messageContents1) == nullptr ||
629             strstr(message, msg.messageContents2) == nullptr)
630         {
631             continue;
632         }
633 
634         // If the error is due to exposing coherent framebuffer fetch (without
635         // VK_EXT_rasterization_order_attachment_access), but framebuffer fetch has not been used by
636         // the application, report it.
637         //
638         // Note that currently syncval doesn't support the
639         // VK_EXT_rasterization_order_attachment_access extension, so the syncval messages would
640         // continue to be produced despite the extension.
641         constexpr bool kSyncValSupportsRasterizationOrderExtension = false;
642         const bool hasRasterizationOrderExtension =
643             renderer->getFeatures().supportsRasterizationOrderAttachmentAccess.enabled &&
644             kSyncValSupportsRasterizationOrderExtension;
645         if (msg.isDueToNonConformantCoherentFramebufferFetch &&
646             (!isFramebufferFetchUsed || hasRasterizationOrderExtension))
647         {
648             return DebugMessageReport::Print;
649         }
650 
651         // Otherwise ignore the message
652         return DebugMessageReport::Ignore;
653     }
654 
655     return DebugMessageReport::Print;
656 }
657 
GetVkObjectTypeName(VkObjectType type)658 const char *GetVkObjectTypeName(VkObjectType type)
659 {
660     switch (type)
661     {
662         case VK_OBJECT_TYPE_UNKNOWN:
663             return "Unknown";
664         case VK_OBJECT_TYPE_INSTANCE:
665             return "Instance";
666         case VK_OBJECT_TYPE_PHYSICAL_DEVICE:
667             return "Physical Device";
668         case VK_OBJECT_TYPE_DEVICE:
669             return "Device";
670         case VK_OBJECT_TYPE_QUEUE:
671             return "Queue";
672         case VK_OBJECT_TYPE_SEMAPHORE:
673             return "Semaphore";
674         case VK_OBJECT_TYPE_COMMAND_BUFFER:
675             return "Command Buffer";
676         case VK_OBJECT_TYPE_FENCE:
677             return "Fence";
678         case VK_OBJECT_TYPE_DEVICE_MEMORY:
679             return "Device Memory";
680         case VK_OBJECT_TYPE_BUFFER:
681             return "Buffer";
682         case VK_OBJECT_TYPE_IMAGE:
683             return "Image";
684         case VK_OBJECT_TYPE_EVENT:
685             return "Event";
686         case VK_OBJECT_TYPE_QUERY_POOL:
687             return "Query Pool";
688         case VK_OBJECT_TYPE_BUFFER_VIEW:
689             return "Buffer View";
690         case VK_OBJECT_TYPE_IMAGE_VIEW:
691             return "Image View";
692         case VK_OBJECT_TYPE_SHADER_MODULE:
693             return "Shader Module";
694         case VK_OBJECT_TYPE_PIPELINE_CACHE:
695             return "Pipeline Cache";
696         case VK_OBJECT_TYPE_PIPELINE_LAYOUT:
697             return "Pipeline Layout";
698         case VK_OBJECT_TYPE_RENDER_PASS:
699             return "Render Pass";
700         case VK_OBJECT_TYPE_PIPELINE:
701             return "Pipeline";
702         case VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT:
703             return "Descriptor Set Layout";
704         case VK_OBJECT_TYPE_SAMPLER:
705             return "Sampler";
706         case VK_OBJECT_TYPE_DESCRIPTOR_POOL:
707             return "Descriptor Pool";
708         case VK_OBJECT_TYPE_DESCRIPTOR_SET:
709             return "Descriptor Set";
710         case VK_OBJECT_TYPE_FRAMEBUFFER:
711             return "Framebuffer";
712         case VK_OBJECT_TYPE_COMMAND_POOL:
713             return "Command Pool";
714         case VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION:
715             return "Sampler YCbCr Conversion";
716         case VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE:
717             return "Descriptor Update Template";
718         case VK_OBJECT_TYPE_SURFACE_KHR:
719             return "Surface";
720         case VK_OBJECT_TYPE_SWAPCHAIN_KHR:
721             return "Swapchain";
722         case VK_OBJECT_TYPE_DISPLAY_KHR:
723             return "Display";
724         case VK_OBJECT_TYPE_DISPLAY_MODE_KHR:
725             return "Display Mode";
726         case VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NV:
727             return "Indirect Commands Layout";
728         case VK_OBJECT_TYPE_DEBUG_UTILS_MESSENGER_EXT:
729             return "Debug Utils Messenger";
730         case VK_OBJECT_TYPE_VALIDATION_CACHE_EXT:
731             return "Validation Cache";
732         case VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV:
733             return "Acceleration Structure";
734         default:
735             return "<Unrecognized>";
736     }
737 }
738 
// Callback registered with VK_EXT_debug_utils.  Receives validation-layer
// messages, filters out known-noise message IDs via ShouldReportDebugMessage,
// then formats the message together with its queue/command-buffer label
// hierarchy and the involved Vulkan objects, forwards it to the Renderer and
// emits it through ANGLE's ERR()/WARN() logging.
VKAPI_ATTR VkBool32 VKAPI_CALL
DebugUtilsMessenger(VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity,
                    VkDebugUtilsMessageTypeFlagsEXT messageTypes,
                    const VkDebugUtilsMessengerCallbackDataEXT *callbackData,
                    void *userData)
{
    // The messenger is created with the Renderer as its user data.
    Renderer *renderer = static_cast<Renderer *>(userData);

    // VUID-VkDebugUtilsMessengerCallbackDataEXT-pMessage-parameter
    // pMessage must be a null-terminated UTF-8 string
    ASSERT(callbackData->pMessage != nullptr);

    // See if it's an issue we are aware of and don't want to be spammed about.
    // Always report the debug message if message ID is missing
    if (callbackData->pMessageIdName != nullptr &&
        ShouldReportDebugMessage(renderer, callbackData->pMessageIdName, callbackData->pMessage) ==
            DebugMessageReport::Ignore)
    {
        // VK_FALSE: do not abort the Vulkan call that triggered the message.
        return VK_FALSE;
    }

    std::ostringstream log;
    if (callbackData->pMessageIdName != nullptr)
    {
        log << "[ " << callbackData->pMessageIdName << " ] ";
    }
    log << callbackData->pMessage << std::endl;

    // Aesthetic value based on length of the function name, line number, etc.
    constexpr size_t kStartIndent = 28;

    // Output the debug marker hierarchy under which this error has occured.
    // Note: |indent| is post-incremented on each line so every nested label is
    // indented one column deeper than its parent.
    size_t indent = kStartIndent;
    if (callbackData->queueLabelCount > 0)
    {
        log << std::string(indent++, ' ') << "<Queue Label Hierarchy:>" << std::endl;
        for (uint32_t i = 0; i < callbackData->queueLabelCount; ++i)
        {
            log << std::string(indent++, ' ') << callbackData->pQueueLabels[i].pLabelName
                << std::endl;
        }
    }
    if (callbackData->cmdBufLabelCount > 0)
    {
        log << std::string(indent++, ' ') << "<Command Buffer Label Hierarchy:>" << std::endl;
        for (uint32_t i = 0; i < callbackData->cmdBufLabelCount; ++i)
        {
            log << std::string(indent++, ' ') << callbackData->pCmdBufLabels[i].pLabelName
                << std::endl;
        }
    }
    // Output the objects involved in this error message.
    if (callbackData->objectCount > 0)
    {
        for (uint32_t i = 0; i < callbackData->objectCount; ++i)
        {
            const char *objectName = callbackData->pObjects[i].pObjectName;
            const char *objectType = GetVkObjectTypeName(callbackData->pObjects[i].objectType);
            uint64_t objectHandle  = callbackData->pObjects[i].objectHandle;
            log << std::string(indent, ' ') << "Object: ";
            if (objectHandle == 0)
            {
                log << "VK_NULL_HANDLE";
            }
            else
            {
                // Handles are printed in hex; restore decimal for later output.
                log << "0x" << std::hex << objectHandle << std::dec;
            }
            log << " (type = " << objectType << "(" << callbackData->pObjects[i].objectType << "))";
            if (objectName)
            {
                log << " [" << objectName << "]";
            }
            log << std::endl;
        }
    }

    bool isError    = (messageSeverity & VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT) != 0;
    std::string msg = log.str();

    // Record the message on the renderer regardless of severity.
    renderer->onNewValidationMessage(msg);

    if (isError)
    {
        ERR() << msg;
    }
    else
    {
        WARN() << msg;
    }

    // VK_FALSE tells the validation layers not to abort the triggering call.
    return VK_FALSE;
}
832 
833 VKAPI_ATTR void VKAPI_CALL
MemoryReportCallback(const VkDeviceMemoryReportCallbackDataEXT * callbackData,void * userData)834 MemoryReportCallback(const VkDeviceMemoryReportCallbackDataEXT *callbackData, void *userData)
835 {
836     Renderer *renderer = static_cast<Renderer *>(userData);
837     renderer->processMemoryReportCallback(*callbackData);
838 }
839 
// Clamps |current| down to |lower|: returns whichever of the two versions is
// smaller.
gl::Version LimitVersionTo(const gl::Version &current, const gl::Version &lower)
{
    return (current < lower) ? current : lower;
}
844 
FencePropertiesCompatibleWithAndroid(const VkExternalFenceProperties & externalFenceProperties)845 [[maybe_unused]] bool FencePropertiesCompatibleWithAndroid(
846     const VkExternalFenceProperties &externalFenceProperties)
847 {
848     // handleType here is the external fence type -
849     // we want type compatible with creating and export/dup() Android FD
850 
851     // Imported handleType that can be exported - need for vkGetFenceFdKHR()
852     if ((externalFenceProperties.exportFromImportedHandleTypes &
853          VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT_KHR) == 0)
854     {
855         return false;
856     }
857 
858     // HandleTypes which can be specified at creating a fence
859     if ((externalFenceProperties.compatibleHandleTypes &
860          VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT_KHR) == 0)
861     {
862         return false;
863     }
864 
865     constexpr VkExternalFenceFeatureFlags kFeatureFlags =
866         (VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT_KHR |
867          VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT_KHR);
868     if ((externalFenceProperties.externalFenceFeatures & kFeatureFlags) != kFeatureFlags)
869     {
870         return false;
871     }
872 
873     return true;
874 }
875 
SemaphorePropertiesCompatibleWithAndroid(const VkExternalSemaphoreProperties & externalSemaphoreProperties)876 [[maybe_unused]] bool SemaphorePropertiesCompatibleWithAndroid(
877     const VkExternalSemaphoreProperties &externalSemaphoreProperties)
878 {
879     // handleType here is the external semaphore type -
880     // we want type compatible with importing an Android FD
881 
882     constexpr VkExternalSemaphoreFeatureFlags kFeatureFlags =
883         (VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT_KHR);
884     if ((externalSemaphoreProperties.externalSemaphoreFeatures & kFeatureFlags) != kFeatureFlags)
885     {
886         return false;
887     }
888 
889     return true;
890 }
891 
892 // Exclude memory type indices that include the host-visible bit from VMA image suballocation.
GetMemoryTypeBitsExcludingHostVisible(Renderer * renderer,VkMemoryPropertyFlags propertyFlags,uint32_t availableMemoryTypeBits)893 uint32_t GetMemoryTypeBitsExcludingHostVisible(Renderer *renderer,
894                                                VkMemoryPropertyFlags propertyFlags,
895                                                uint32_t availableMemoryTypeBits)
896 {
897     const vk::MemoryProperties &memoryProperties = renderer->getMemoryProperties();
898     ASSERT(memoryProperties.getMemoryTypeCount() <= 32);
899     uint32_t memoryTypeBitsOut = availableMemoryTypeBits;
900 
901     // For best allocation results, the memory type indices that include the host-visible flag bit
902     // are removed.
903     for (size_t memoryIndex : angle::BitSet<32>(availableMemoryTypeBits))
904     {
905         VkMemoryPropertyFlags memoryFlags =
906             memoryProperties.getMemoryType(static_cast<uint32_t>(memoryIndex)).propertyFlags;
907         if ((memoryFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
908         {
909             memoryTypeBitsOut &= ~(angle::Bit<uint32_t>(memoryIndex));
910             continue;
911         }
912 
913         // If the protected bit is not required, all memory type indices with this bit should be
914         // ignored.
915         if ((memoryFlags & ~propertyFlags & VK_MEMORY_PROPERTY_PROTECTED_BIT) != 0)
916         {
917             memoryTypeBitsOut &= ~(angle::Bit<uint32_t>(memoryIndex));
918         }
919     }
920 
921     return memoryTypeBitsOut;
922 }
923 
// Header data type used for the pipeline cache.
ANGLE_ENABLE_STRUCT_PADDING_WARNINGS

// Fixed-size header prepended to every blob-cache chunk of the compressed
// pipeline cache.  Instances are serialized/deserialized with memcpy (see
// CompressAndStorePipelineCacheVk / GetAndDecompressPipelineCacheVk), so the
// field order, sizes and absence of padding are part of the stored format;
// any layout change must bump kPipelineCacheVersion.
class CacheDataHeader
{
  public:
    // Fills in all header fields.  The version is always the current
    // kPipelineCacheVersion.
    void setData(uint32_t compressedDataCRC,
                 uint32_t cacheDataSize,
                 uint16_t numChunks,
                 uint16_t chunkIndex)
    {
        mVersion           = kPipelineCacheVersion;
        mCompressedDataCRC = compressedDataCRC;
        mCacheDataSize     = cacheDataSize;
        mNumChunks         = numChunks;
        mChunkIndex        = chunkIndex;
    }

    // Reads back all header fields.  The 16-bit chunk fields are widened to
    // size_t for the callers' convenience.
    void getData(uint32_t *versionOut,
                 uint32_t *compressedDataCRCOut,
                 uint32_t *cacheDataSizeOut,
                 size_t *numChunksOut,
                 size_t *chunkIndexOut) const
    {
        *versionOut           = mVersion;
        *compressedDataCRCOut = mCompressedDataCRC;
        *cacheDataSizeOut     = mCacheDataSize;
        *numChunksOut         = static_cast<size_t>(mNumChunks);
        *chunkIndexOut        = static_cast<size_t>(mChunkIndex);
    }

  private:
    // For pipeline cache, the values stored in key data has the following order:
    // {headerVersion, compressedDataCRC, originalCacheSize, numChunks, chunkIndex;
    // chunkCompressedData}. The header values are used to validate the data. For example, if the
    // original and compressed sizes are 70000 bytes (68k) and 68841 bytes (67k), the compressed
    // data will be divided into two chunks: {ver,crc0,70000,2,0;34421 bytes} and
    // {ver,crc1,70000,2,1;34420 bytes}.
    // The version is used to keep track of the cache format. Please note that kPipelineCacheVersion
    // must be incremented by 1 in case of any updates to the cache header or data structure. While
    // it is possible to modify the fields in the header, it is recommended to keep the version on
    // top and the same size unless absolutely necessary.

    uint32_t mVersion;
    uint32_t mCompressedDataCRC;
    uint32_t mCacheDataSize;
    uint16_t mNumChunks;
    uint16_t mChunkIndex;
};

ANGLE_DISABLE_STRUCT_PADDING_WARNINGS
975 
// Pack header data for the pipeline cache key data.
// Thin free-function wrapper over CacheDataHeader::setData.
void PackHeaderDataForPipelineCache(uint32_t compressedDataCRC,
                                    uint32_t cacheDataSize,
                                    uint16_t numChunks,
                                    uint16_t chunkIndex,
                                    CacheDataHeader *dataOut)
{
    dataOut->setData(compressedDataCRC, cacheDataSize, numChunks, chunkIndex);
}
985 
// Unpack header data from the pipeline cache key data.
// Thin free-function wrapper over CacheDataHeader::getData.
void UnpackHeaderDataForPipelineCache(CacheDataHeader *data,
                                      uint32_t *versionOut,
                                      uint32_t *compressedDataCRCOut,
                                      uint32_t *cacheDataSizeOut,
                                      size_t *numChunksOut,
                                      size_t *chunkIndexOut)
{
    data->getData(versionOut, compressedDataCRCOut, cacheDataSizeOut, numChunksOut, chunkIndexOut);
}
996 
// Computes the blob-cache key for one chunk of the pipeline cache: a SHA1
// hash over the device's pipeline cache UUID, vendor/device IDs and the chunk
// index, so each chunk of a given device's cache gets a distinct, stable key.
void ComputePipelineCacheVkChunkKey(VkPhysicalDeviceProperties physicalDeviceProperties,
                                    const uint8_t chunkIndex,
                                    angle::BlobCacheKey *hashOut)
{
    std::ostringstream hashStream("ANGLE Pipeline Cache: ", std::ios_base::ate);
    // Add the pipeline cache UUID to make sure the blob cache always gives a compatible pipeline
    // cache.  It's not particularly necessary to write it as a hex number as done here, so long as
    // there is no '\0' in the result.
    for (const uint32_t c : physicalDeviceProperties.pipelineCacheUUID)
    {
        hashStream << std::hex << c;
    }
    // Add the vendor and device id too for good measure.
    hashStream << std::hex << physicalDeviceProperties.vendorID;
    hashStream << std::hex << physicalDeviceProperties.deviceID;

    // Add chunkIndex to generate unique key for chunks.
    hashStream << std::hex << static_cast<uint32_t>(chunkIndex);

    const std::string &hashString = hashStream.str();
    angle::base::SHA1HashBytes(reinterpret_cast<const unsigned char *>(hashString.c_str()),
                               hashString.length(), hashOut->data());
}
1020 
// Compresses |cacheData| (the serialized Vulkan pipeline cache) and stores it
// in the blob cache, split into chunks of at most kMaxBlobCacheSize bytes.
// Each chunk is prefixed with a CacheDataHeader used for validation when
// reloading.  Oversized or incompressible caches are skipped with a perf
// warning rather than failing.
void CompressAndStorePipelineCacheVk(VkPhysicalDeviceProperties physicalDeviceProperties,
                                     vk::GlobalOps *globalOps,
                                     ContextVk *contextVk,
                                     const std::vector<uint8_t> &cacheData,
                                     const size_t maxTotalSize)
{
    // Though the pipeline cache will be compressed and divided into several chunks to store in blob
    // cache, the largest total size of blob cache is only 2M in android now, so there is no use to
    // handle big pipeline cache when android will reject it finally.
    if (cacheData.size() >= maxTotalSize)
    {
        // TODO: handle the big pipeline cache. http://anglebug.com/4722
        ANGLE_PERF_WARNING(contextVk->getDebug(), GL_DEBUG_SEVERITY_LOW,
                           "Skip syncing pipeline cache data when it's larger than maxTotalSize.");
        return;
    }

    // To make it possible to store more pipeline cache data, compress the whole pipelineCache.
    angle::MemoryBuffer compressedData;

    if (!angle::CompressBlob(cacheData.size(), cacheData.data(), &compressedData))
    {
        ANGLE_PERF_WARNING(contextVk->getDebug(), GL_DEBUG_SEVERITY_LOW,
                           "Skip syncing pipeline cache data as it failed compression.");
        return;
    }

    // If the size of compressedData is larger than (kMaxBlobCacheSize - sizeof(numChunks)),
    // the pipelineCache still can't be stored in blob cache. Divide the large compressed
    // pipelineCache into several parts to store seperately. There is no function to
    // query the limit size in android.
    constexpr size_t kMaxBlobCacheSize = 64 * 1024;
    size_t compressedOffset            = 0;

    // Chunks are sized evenly; the last chunk absorbs the remainder below.
    const size_t numChunks = UnsignedCeilDivide(static_cast<unsigned int>(compressedData.size()),
                                                kMaxBlobCacheSize - sizeof(CacheDataHeader));
    ASSERT(numChunks <= UINT16_MAX);
    size_t chunkSize = UnsignedCeilDivide(static_cast<unsigned int>(compressedData.size()),
                                          static_cast<unsigned int>(numChunks));
    uint32_t compressedDataCRC = 0;
    if (kEnableCRCForPipelineCache)
    {
        // CRC over the whole compressed stream; checked on reload to detect corruption.
        compressedDataCRC = angle::GenerateCrc(compressedData.data(), compressedData.size());
    }

    for (size_t chunkIndex = 0; chunkIndex < numChunks; ++chunkIndex)
    {
        if (chunkIndex == numChunks - 1)
        {
            // Last chunk: whatever bytes remain.
            chunkSize = compressedData.size() - compressedOffset;
        }

        angle::MemoryBuffer keyData;
        if (!keyData.resize(sizeof(CacheDataHeader) + chunkSize))
        {
            ANGLE_PERF_WARNING(contextVk->getDebug(), GL_DEBUG_SEVERITY_LOW,
                               "Skip syncing pipeline cache data due to out of memory.");
            return;
        }

        // Add the header data, followed by the compressed data.
        ASSERT(cacheData.size() <= UINT32_MAX);
        CacheDataHeader headerData = {};
        PackHeaderDataForPipelineCache(compressedDataCRC, static_cast<uint32_t>(cacheData.size()),
                                       static_cast<uint16_t>(numChunks),
                                       static_cast<uint16_t>(chunkIndex), &headerData);
        memcpy(keyData.data(), &headerData, sizeof(CacheDataHeader));
        memcpy(keyData.data() + sizeof(CacheDataHeader), compressedData.data() + compressedOffset,
               chunkSize);
        compressedOffset += chunkSize;

        // Create unique hash key.
        angle::BlobCacheKey chunkCacheHash;
        ComputePipelineCacheVkChunkKey(physicalDeviceProperties, chunkIndex, &chunkCacheHash);

        globalOps->putBlob(chunkCacheHash, keyData);
    }
}
1099 
// angle::Closure that wraps CompressAndStorePipelineCacheVk so the compress +
// store work can be deferred/scheduled.  Owns a moved-in copy of the pipeline
// cache data for the duration of the task.
class CompressAndStorePipelineCacheTask : public angle::Closure
{
  public:
    CompressAndStorePipelineCacheTask(vk::GlobalOps *globalOps,
                                      ContextVk *contextVk,
                                      std::vector<uint8_t> &&cacheData,
                                      size_t kMaxTotalSize)
        : mGlobalOps(globalOps),
          mContextVk(contextVk),
          mCacheData(std::move(cacheData)),
          mMaxTotalSize(kMaxTotalSize)
    {}

    // Runs the actual compression and blob-cache store.
    void operator()() override
    {
        ANGLE_TRACE_EVENT0("gpu.angle", "CompressAndStorePipelineCacheVk");
        CompressAndStorePipelineCacheVk(mContextVk->getRenderer()->getPhysicalDeviceProperties(),
                                        mGlobalOps, mContextVk, mCacheData, mMaxTotalSize);
    }

  private:
    vk::GlobalOps *mGlobalOps;
    // NOTE(review): raw pointers — the task presumably must not outlive the
    // context/global ops; confirm scheduling guarantees at the call site.
    ContextVk *mContextVk;
    std::vector<uint8_t> mCacheData;
    size_t mMaxTotalSize;
};
1126 
// Reassembles and decompresses the pipeline cache previously stored by
// CompressAndStorePipelineCacheVk.  Reads chunk 0 to learn the chunk count and
// expected sizes/CRC, fetches and validates every chunk, concatenates them and
// decompresses into |uncompressedData|.  |*success| is set to true only if the
// complete, validated cache was recovered; most validation failures return
// Continue (treat as cache miss) rather than an error.
angle::Result GetAndDecompressPipelineCacheVk(VkPhysicalDeviceProperties physicalDeviceProperties,
                                              vk::Context *context,
                                              vk::GlobalOps *globalOps,
                                              angle::MemoryBuffer *uncompressedData,
                                              bool *success)
{
    // Make sure that the bool output is initialized to false.
    *success = false;

    // Compute the hash key of chunkIndex 0 and find the first cache data in blob cache.
    angle::BlobCacheKey chunkCacheHash;
    ComputePipelineCacheVkChunkKey(physicalDeviceProperties, 0, &chunkCacheHash);
    angle::BlobCacheValue keyData;

    if (!globalOps->getBlob(chunkCacheHash, &keyData) || keyData.size() < sizeof(CacheDataHeader))
    {
        // Nothing in the cache.
        return angle::Result::Continue;
    }

    // Get the number of chunks and other values from the header for data validation.
    uint32_t cacheVersion;
    uint32_t compressedDataCRC;
    uint32_t uncompressedCacheDataSize;
    size_t numChunks;
    size_t chunkIndex0;

    CacheDataHeader headerData = {};
    memcpy(&headerData, keyData.data(), sizeof(CacheDataHeader));
    UnpackHeaderDataForPipelineCache(&headerData, &cacheVersion, &compressedDataCRC,
                                     &uncompressedCacheDataSize, &numChunks, &chunkIndex0);
    if (cacheVersion == kPipelineCacheVersion)
    {
        // The data must not contain corruption.
        if (chunkIndex0 != 0 || numChunks == 0 || uncompressedCacheDataSize == 0)
        {
            FATAL() << "Unexpected values while unpacking chunk index 0: " << "cacheVersion = "
                    << cacheVersion << ", chunkIndex = " << chunkIndex0
                    << ", numChunks = " << numChunks
                    << ", uncompressedCacheDataSize = " << uncompressedCacheDataSize;
        }
    }
    else
    {
        // A version mismatch is expected after a cache-format change; treat
        // the stored data as stale and start over.
        WARN() << "Change in cache header version detected: " << "newVersion = "
               << kPipelineCacheVersion << ", existingVersion = " << cacheVersion;

        return angle::Result::Continue;
    }

    size_t chunkSize      = keyData.size() - sizeof(CacheDataHeader);
    size_t compressedSize = 0;

    // Allocate enough memory.  All chunks except possibly the last have the
    // same size as chunk 0, so chunkSize * numChunks is an upper bound.
    angle::MemoryBuffer compressedData;
    ANGLE_VK_CHECK(context, compressedData.resize(chunkSize * numChunks),
                   VK_ERROR_INITIALIZATION_FAILED);

    // To combine the parts of the pipelineCache data.
    for (size_t chunkIndex = 0; chunkIndex < numChunks; ++chunkIndex)
    {
        // Get the unique key by chunkIndex.
        ComputePipelineCacheVkChunkKey(physicalDeviceProperties, chunkIndex, &chunkCacheHash);

        if (!globalOps->getBlob(chunkCacheHash, &keyData) ||
            keyData.size() < sizeof(CacheDataHeader))
        {
            // Can't find every part of the cache data.
            WARN() << "Failed to get pipeline cache chunk " << chunkIndex << " of " << numChunks;
            return angle::Result::Continue;
        }

        // Validate the header values and ensure there is enough space to store.
        uint32_t checkCacheVersion;
        uint32_t checkCompressedDataCRC;
        uint32_t checkUncompressedCacheDataSize;
        size_t checkNumChunks;
        size_t checkChunkIndex;

        memcpy(&headerData, keyData.data(), sizeof(CacheDataHeader));
        UnpackHeaderDataForPipelineCache(&headerData, &checkCacheVersion, &checkCompressedDataCRC,
                                         &checkUncompressedCacheDataSize, &checkNumChunks,
                                         &checkChunkIndex);

        // Every chunk's header must agree with chunk 0's header.
        chunkSize = keyData.size() - sizeof(CacheDataHeader);
        bool isHeaderDataCorrupted =
            (checkCacheVersion != cacheVersion) || (checkNumChunks != numChunks) ||
            (checkUncompressedCacheDataSize != uncompressedCacheDataSize) ||
            (checkCompressedDataCRC != compressedDataCRC) || (checkChunkIndex != chunkIndex) ||
            (compressedData.size() < compressedSize + chunkSize);
        if (isHeaderDataCorrupted)
        {
            WARN() << "Pipeline cache chunk header corrupted: " << "checkCacheVersion = "
                   << checkCacheVersion << ", cacheVersion = " << cacheVersion
                   << ", checkNumChunks = " << checkNumChunks << ", numChunks = " << numChunks
                   << ", checkUncompressedCacheDataSize = " << checkUncompressedCacheDataSize
                   << ", uncompressedCacheDataSize = " << uncompressedCacheDataSize
                   << ", checkCompressedDataCRC = " << checkCompressedDataCRC
                   << ", compressedDataCRC = " << compressedDataCRC
                   << ", checkChunkIndex = " << checkChunkIndex << ", chunkIndex = " << chunkIndex
                   << ", compressedData.size() = " << compressedData.size()
                   << ", (compressedSize + chunkSize) = " << (compressedSize + chunkSize);
            return angle::Result::Continue;
        }

        // Append this chunk's payload (past its header) to the compressed stream.
        memcpy(compressedData.data() + compressedSize, keyData.data() + sizeof(CacheDataHeader),
               chunkSize);
        compressedSize += chunkSize;
    }

    // CRC for compressed data and size for decompressed data should match the values in the header.
    if (kEnableCRCForPipelineCache)
    {
        uint32_t computedCompressedDataCRC =
            angle::GenerateCrc(compressedData.data(), compressedSize);
        if (computedCompressedDataCRC != compressedDataCRC)
        {
            if (compressedDataCRC == 0)
            {
                // This could be due to the cache being populated before kEnableCRCForPipelineCache
                // was enabled.
                WARN() << "Expected CRC = " << compressedDataCRC
                       << ", Actual CRC = " << computedCompressedDataCRC;
                return angle::Result::Continue;
            }

            // If the expected CRC is non-zero and does not match the actual CRC from the data,
            // there has been an unexpected data corruption.
            ERR() << "Expected CRC = " << compressedDataCRC
                  << ", Actual CRC = " << computedCompressedDataCRC;

            ERR() << "Data extracted from the cache headers: " << std::hex
                  << ", compressedDataCRC = 0x" << compressedDataCRC << "numChunks = 0x"
                  << numChunks << ", uncompressedCacheDataSize = 0x" << uncompressedCacheDataSize;

            FATAL() << "CRC check failed; possible pipeline cache data corruption.";
            return angle::Result::Stop;
        }
    }

    ANGLE_VK_CHECK(context,
                   angle::DecompressBlob(compressedData.data(), compressedSize,
                                         uncompressedCacheDataSize, uncompressedData),
                   VK_ERROR_INITIALIZATION_FAILED);

    if (uncompressedData->size() != uncompressedCacheDataSize)
    {
        WARN() << "Expected uncompressed size = " << uncompressedCacheDataSize
               << ", Actual uncompressed size = " << uncompressedData->size();
        return angle::Result::Continue;
    }

    *success = true;
    return angle::Result::Continue;
}
1282 
// Environment variable (and associated Android property) to enable Vulkan debug-utils markers.
constexpr char kEnableDebugMarkersVarName[]      = "ANGLE_ENABLE_DEBUG_MARKERS";
constexpr char kEnableDebugMarkersPropertyName[] = "debug.angle.markers";
1286 
GetShadingRateFromVkExtent(const VkExtent2D & extent)1287 ANGLE_INLINE gl::ShadingRate GetShadingRateFromVkExtent(const VkExtent2D &extent)
1288 {
1289     if (extent.width == 1)
1290     {
1291         if (extent.height == 1)
1292         {
1293             return gl::ShadingRate::_1x1;
1294         }
1295         else if (extent.height == 2)
1296         {
1297             return gl::ShadingRate::_1x2;
1298         }
1299     }
1300     else if (extent.width == 2)
1301     {
1302         if (extent.height == 1)
1303         {
1304             return gl::ShadingRate::_2x1;
1305         }
1306         else if (extent.height == 2)
1307         {
1308             return gl::ShadingRate::_2x2;
1309         }
1310     }
1311     else if (extent.width == 4)
1312     {
1313         if (extent.height == 2)
1314         {
1315             return gl::ShadingRate::_4x2;
1316         }
1317         else if (extent.height == 4)
1318         {
1319             return gl::ShadingRate::_4x4;
1320         }
1321     }
1322 
1323     return gl::ShadingRate::Undefined;
1324 }
1325 
DumpPipelineCacheGraph(Renderer * renderer,const std::ostringstream & graph)1326 void DumpPipelineCacheGraph(Renderer *renderer, const std::ostringstream &graph)
1327 {
1328     std::string dumpPath = renderer->getPipelineCacheGraphDumpPath();
1329     if (dumpPath.size() == 0)
1330     {
1331         WARN() << "No path supplied for pipeline cache graph dump!";
1332         return;
1333     }
1334 
1335     static std::atomic<uint32_t> sContextIndex(0);
1336     std::string filename = dumpPath;
1337     filename += angle::GetExecutableName();
1338     filename += std::to_string(sContextIndex.fetch_add(1));
1339     filename += ".dump";
1340 
1341     INFO() << "Dumping pipeline cache transition graph to: \"" << filename << "\"";
1342 
1343     std::ofstream out = std::ofstream(filename, std::ofstream::binary);
1344     if (!out.is_open())
1345     {
1346         ERR() << "Failed to open \"" << filename << "\"";
1347     }
1348 
1349     out << "digraph {\n" << " node [shape=box";
1350     if (renderer->getFeatures().supportsPipelineCreationFeedback.enabled)
1351     {
1352         out << ",color=green";
1353     }
1354     out << "]\n";
1355     out << graph.str();
1356     out << "}\n";
1357     out.close();
1358 }
1359 }  // namespace
1360 
// OneOffCommandPool implementation.
// The pool starts in an invalid state; init() must set a real protection type
// before command buffers can be obtained (enforced by an ASSERT there).
OneOffCommandPool::OneOffCommandPool() : mProtectionType(vk::ProtectionType::InvalidEnum) {}
1363 
// Records the protection type this pool will create command buffers with.
// May only be called while the VkCommandPool has not been created yet; the
// pool itself is created lazily in getCommandBuffer().
void OneOffCommandPool::init(vk::ProtectionType protectionType)
{
    ASSERT(!mCommandPool.valid());
    mProtectionType = protectionType;
}
1369 
destroy(VkDevice device)1370 void OneOffCommandPool::destroy(VkDevice device)
1371 {
1372     std::unique_lock<angle::SimpleMutex> lock(mMutex);
1373     for (PendingOneOffCommands &pending : mPendingCommands)
1374     {
1375         pending.commandBuffer.releaseHandle();
1376     }
1377     mCommandPool.destroy(device);
1378     mProtectionType = vk::ProtectionType::InvalidEnum;
1379 }
1380 
getCommandBuffer(vk::Context * context,vk::PrimaryCommandBuffer * commandBufferOut)1381 angle::Result OneOffCommandPool::getCommandBuffer(vk::Context *context,
1382                                                   vk::PrimaryCommandBuffer *commandBufferOut)
1383 {
1384     std::unique_lock<angle::SimpleMutex> lock(mMutex);
1385 
1386     if (!mPendingCommands.empty() &&
1387         context->getRenderer()->hasResourceUseFinished(mPendingCommands.front().use))
1388     {
1389         *commandBufferOut = std::move(mPendingCommands.front().commandBuffer);
1390         mPendingCommands.pop_front();
1391         ANGLE_VK_TRY(context, commandBufferOut->reset());
1392     }
1393     else
1394     {
1395         if (!mCommandPool.valid())
1396         {
1397             VkCommandPoolCreateInfo createInfo = {};
1398             createInfo.sType                   = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
1399             createInfo.flags                   = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT |
1400                                VK_COMMAND_POOL_CREATE_TRANSIENT_BIT;
1401             ASSERT(mProtectionType == vk::ProtectionType::Unprotected ||
1402                    mProtectionType == vk::ProtectionType::Protected);
1403             if (mProtectionType == vk::ProtectionType::Protected)
1404             {
1405                 createInfo.flags |= VK_COMMAND_POOL_CREATE_PROTECTED_BIT;
1406             }
1407             ANGLE_VK_TRY(context, mCommandPool.init(context->getDevice(), createInfo));
1408         }
1409 
1410         VkCommandBufferAllocateInfo allocInfo = {};
1411         allocInfo.sType                       = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
1412         allocInfo.level                       = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
1413         allocInfo.commandBufferCount          = 1;
1414         allocInfo.commandPool                 = mCommandPool.getHandle();
1415 
1416         ANGLE_VK_TRY(context, commandBufferOut->init(context->getDevice(), allocInfo));
1417     }
1418 
1419     VkCommandBufferBeginInfo beginInfo = {};
1420     beginInfo.sType                    = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
1421     beginInfo.flags                    = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
1422     beginInfo.pInheritanceInfo         = nullptr;
1423     ANGLE_VK_TRY(context, commandBufferOut->begin(beginInfo));
1424 
1425     return angle::Result::Continue;
1426 }
1427 
releaseCommandBuffer(const QueueSerial & submitQueueSerial,vk::PrimaryCommandBuffer && primary)1428 void OneOffCommandPool::releaseCommandBuffer(const QueueSerial &submitQueueSerial,
1429                                              vk::PrimaryCommandBuffer &&primary)
1430 {
1431     std::unique_lock<angle::SimpleMutex> lock(mMutex);
1432     mPendingCommands.push_back({vk::ResourceUse(submitQueueSerial), std::move(primary)});
1433 }
1434 
1435 // Renderer implementation.
// Renderer constructor: establishes the "empty" state of the renderer.  No
// Vulkan calls are made here; actual bring-up happens in initialize().
Renderer::Renderer()
    : mGlobalOps(nullptr),
      mLibVulkanLibrary(nullptr),
      mCapsInitialized(false),
      mInstanceVersion(0),
      mDeviceVersion(0),
      mInstance(VK_NULL_HANDLE),
      mEnableValidationLayers(false),
      mEnableDebugUtils(false),
      mAngleDebuggerMode(false),
      mEnabledICD(angle::vk::ICD::Default),
      mDebugUtilsMessenger(VK_NULL_HANDLE),
      mPhysicalDevice(VK_NULL_HANDLE),
      mMaxVertexAttribDivisor(1),
      mCurrentQueueFamilyIndex(std::numeric_limits<uint32_t>::max()),
      mMaxVertexAttribStride(0),
      mDefaultUniformBufferSize(kPreferredDefaultUniformBufferSize),
      mDevice(VK_NULL_HANDLE),
      mDeviceLost(false),
      mStagingBufferAlignment(1),
      mHostVisibleVertexConversionBufferMemoryTypeIndex(kInvalidMemoryTypeIndex),
      mDeviceLocalVertexConversionBufferMemoryTypeIndex(kInvalidMemoryTypeIndex),
      mVertexConversionBufferAlignment(1),
      mPipelineCacheVkUpdateTimeout(kPipelineCacheVkUpdatePeriod),
      mPipelineCacheSizeAtLastSync(0),
      mPipelineCacheInitialized(false),
      mValidationMessageCount(0),
      mCommandProcessor(this, &mCommandQueue),
      mSupportedBufferWritePipelineStageMask(0),
      mSupportedVulkanShaderStageMask(0),
      mMemoryAllocationTracker(MemoryAllocationTracker(this)),
      mPlaceHolderDescriptorSetLayout(nullptr)
{
    // Format properties are queried lazily; mark every entry invalid so the
    // first query for each format goes to the driver.
    VkFormatProperties invalid = {0, 0, kInvalidFormatFeatureFlags};
    mFormatProperties.fill(invalid);
    mStagingBufferMemoryTypeIndex.fill(kInvalidMemoryTypeIndex);

    // We currently don't have any big-endian devices in the list of supported platforms.  There are
    // a number of places in the Vulkan backend that make this assumption.  This assertion is made
    // early to fail immediately on big-endian platforms.
    ASSERT(IsLittleEndian());

    // Pipeline cache graph dumping is an opt-in debug feature controlled by an
    // environment variable or Android system property.
    mDumpPipelineCacheGraph =
        (angle::GetEnvironmentVarOrAndroidProperty("ANGLE_DUMP_PIPELINE_CACHE_GRAPH",
                                                   "angle.dump_pipeline_cache_graph") == "1");

    mPipelineCacheGraphDumpPath = angle::GetEnvironmentVarOrAndroidProperty(
        "ANGLE_PIPELINE_CACHE_GRAPH_DUMP_PATH", "angle.pipeline_cache_graph_dump_path");
    if (mPipelineCacheGraphDumpPath.size() == 0)
    {
        // Fall back to the platform default (only non-empty on Android).
        mPipelineCacheGraphDumpPath = kDefaultPipelineCacheGraphDumpPath;
    }
}
1489 
~Renderer()1490 Renderer::~Renderer() {}
1491 
hasSharedGarbage()1492 bool Renderer::hasSharedGarbage()
1493 {
1494     return !mSharedGarbageList.empty() || !mSuballocationGarbageList.empty();
1495 }
1496 
// Tears down all renderer-owned Vulkan objects.  Destruction order matters:
// command processing and queues are drained first, then caches/allocators,
// then the device, the debug messenger, and finally the instance.
void Renderer::onDestroy(vk::Context *context)
{
    // A lost device needs its recovery path run before normal teardown.
    if (isDeviceLost())
    {
        handleDeviceLost();
    }

    if (mPlaceHolderDescriptorSetLayout && mPlaceHolderDescriptorSetLayout->get().valid())
    {
        ASSERT(!mPlaceHolderDescriptorSetLayout->isReferenced());
        mPlaceHolderDescriptorSetLayout->get().destroy(getDevice());
        SafeDelete(mPlaceHolderDescriptorSetLayout);
    }

    mCommandProcessor.destroy(context);
    mCommandQueue.destroy(context);

    // mCommandQueue.destroy should already set "last completed" serials to infinite.
    cleanupGarbage();
    ASSERT(!hasSharedGarbage());
    ASSERT(mOrphanedBufferBlockList.empty());

    mRefCountedEventRecycler.destroy(mDevice);

    for (OneOffCommandPool &oneOffCommandPool : mOneOffCommandPoolMap)
    {
        oneOffCommandPool.destroy(mDevice);
    }

    mPipelineCache.destroy(mDevice);
    mSamplerCache.destroy(this);
    mYuvConversionCache.destroy(this);
    mVkFormatDescriptorCountMap.clear();

    mOutsideRenderPassCommandBufferRecycler.onDestroy();
    mRenderPassCommandBufferRecycler.onDestroy();

    mImageMemorySuballocator.destroy(this);
    mAllocator.destroy();

    // When the renderer is being destroyed, it is possible to check if all the allocated memory
    // throughout the execution has been freed.
    mMemoryAllocationTracker.onDestroy();

    if (mDevice)
    {
        vkDestroyDevice(mDevice, nullptr);
        mDevice = VK_NULL_HANDLE;
    }

    if (mDebugUtilsMessenger)
    {
        vkDestroyDebugUtilsMessengerEXT(mInstance, mDebugUtilsMessenger, nullptr);
    }

    logCacheStats();

    if (mInstance)
    {
        vkDestroyInstance(mInstance, nullptr);
        mInstance = VK_NULL_HANDLE;
    }

    if (mCompressEvent)
    {
        // Wait for any in-flight pipeline cache compression task to finish
        // before releasing it.
        mCompressEvent->wait();
        mCompressEvent.reset();
    }

    mMemoryProperties.destroy();
    mPhysicalDevice = VK_NULL_HANDLE;

    mEnabledInstanceExtensions.clear();
    mEnabledDeviceExtensions.clear();

    ASSERT(!hasSharedGarbage());

    if (mLibVulkanLibrary)
    {
        angle::CloseSystemLibrary(mLibVulkanLibrary);
        mLibVulkanLibrary = nullptr;
    }

    // Debug-only: flush the accumulated pipeline cache transition graph.
    if (!mPipelineCacheGraph.str().empty())
    {
        DumpPipelineCacheGraph(this, mPipelineCacheGraph);
    }
}
1585 
// Records the device-lost condition and forwards the notification to the
// global ops.  The flag is set first so observers triggered by the callback
// see a consistent state.
void Renderer::notifyDeviceLost()
{
    mDeviceLost = true;
    mGlobalOps->notifyDeviceLost();
}
1591 
// Returns whether a device-lost condition has been recorded via
// notifyDeviceLost().
bool Renderer::isDeviceLost() const
{
    return mDeviceLost;
}
1596 
// Enumerates the instance extensions available from the ICD, implicit layers
// and the explicitly enabled layers, sets the ANGLE features that depend on
// them, and populates mEnabledInstanceExtensions with the extensions that
// should be enabled on the VkInstance.  Fails if any chosen extension is not
// actually available.
//
// context:                   used for error reporting via ANGLE_VK_TRY.
// enabledInstanceLayerNames: explicit layers whose extensions must also be
//                            enumerated.
// wsiExtension:              platform window-system extension to enable, if
//                            any.
// useVulkanSwapchain:        whether swapchain-related extensions are wanted.
// canLoadDebugUtils:         whether VK_EXT_debug_utils can be safely loaded
//                            (old loaders have a known bug).
angle::Result Renderer::enableInstanceExtensions(vk::Context *context,
                                                 const VulkanLayerVector &enabledInstanceLayerNames,
                                                 const char *wsiExtension,
                                                 UseVulkanSwapchain useVulkanSwapchain,
                                                 bool canLoadDebugUtils)
{
    // Enumerate instance extensions that are provided by the vulkan implementation and implicit
    // layers.
    uint32_t instanceExtensionCount = 0;
    {
        ANGLE_SCOPED_DISABLE_LSAN();
        ANGLE_SCOPED_DISABLE_MSAN();
        ANGLE_VK_TRY(context, vkEnumerateInstanceExtensionProperties(
                                  nullptr, &instanceExtensionCount, nullptr));
    }

    std::vector<VkExtensionProperties> instanceExtensionProps(instanceExtensionCount);
    if (instanceExtensionCount > 0)
    {
        ANGLE_SCOPED_DISABLE_LSAN();
        ANGLE_SCOPED_DISABLE_MSAN();
        ANGLE_VK_TRY(context, vkEnumerateInstanceExtensionProperties(
                                  nullptr, &instanceExtensionCount, instanceExtensionProps.data()));
        // In case fewer items were returned than requested, resize instanceExtensionProps to the
        // number of extensions returned (i.e. instanceExtensionCount).
        instanceExtensionProps.resize(instanceExtensionCount);
    }

    // Enumerate instance extensions that are provided by explicit layers.
    for (const char *layerName : enabledInstanceLayerNames)
    {
        uint32_t previousExtensionCount      = static_cast<uint32_t>(instanceExtensionProps.size());
        uint32_t instanceLayerExtensionCount = 0;
        {
            ANGLE_SCOPED_DISABLE_LSAN();
            ANGLE_SCOPED_DISABLE_MSAN();
            ANGLE_VK_TRY(context, vkEnumerateInstanceExtensionProperties(
                                      layerName, &instanceLayerExtensionCount, nullptr));
        }
        instanceExtensionProps.resize(previousExtensionCount + instanceLayerExtensionCount);
        {
            ANGLE_SCOPED_DISABLE_LSAN();
            ANGLE_SCOPED_DISABLE_MSAN();
            ANGLE_VK_TRY(context, vkEnumerateInstanceExtensionProperties(
                                      layerName, &instanceLayerExtensionCount,
                                      instanceExtensionProps.data() + previousExtensionCount));
        }
        // In case fewer items were returned than requested, resize instanceExtensionProps to the
        // number of extensions returned (i.e. instanceLayerExtensionCount).
        instanceExtensionProps.resize(previousExtensionCount + instanceLayerExtensionCount);
    }

    // Get the list of instance extensions that are available.  The list is
    // sorted so ExtensionFound() can use binary search.
    vk::ExtensionNameList instanceExtensionNames;
    if (!instanceExtensionProps.empty())
    {
        for (const VkExtensionProperties &i : instanceExtensionProps)
        {
            instanceExtensionNames.push_back(i.extensionName);
        }
        std::sort(instanceExtensionNames.begin(), instanceExtensionNames.end(), StrLess);
    }

    // Set ANGLE features that depend on instance extensions
    ANGLE_FEATURE_CONDITION(
        &mFeatures, supportsSurfaceCapabilities2Extension,
        ExtensionFound(VK_KHR_GET_SURFACE_CAPABILITIES_2_EXTENSION_NAME, instanceExtensionNames) &&
            useVulkanSwapchain == UseVulkanSwapchain::Yes);

    ANGLE_FEATURE_CONDITION(&mFeatures, supportsSurfaceProtectedCapabilitiesExtension,
                            ExtensionFound(VK_KHR_SURFACE_PROTECTED_CAPABILITIES_EXTENSION_NAME,
                                           instanceExtensionNames) &&
                                useVulkanSwapchain == UseVulkanSwapchain::Yes);

    // TODO: Validation layer has a bug when vkGetPhysicalDeviceSurfaceFormats2KHR is called
    // on Mock ICD with surface handle set as VK_NULL_HANDLE. http://anglebug.com/7631
    // b/267953710: VK_GOOGLE_surfaceless_query isn't working on some Samsung Xclipse builds
    ANGLE_FEATURE_CONDITION(
        &mFeatures, supportsSurfacelessQueryExtension,
        ExtensionFound(VK_GOOGLE_SURFACELESS_QUERY_EXTENSION_NAME, instanceExtensionNames) &&
            useVulkanSwapchain == UseVulkanSwapchain::Yes && !isMockICDEnabled() && !IsXclipse());

    // VK_KHR_external_fence_capabilities and VK_KHR_extenral_semaphore_capabilities are promoted to
    // core in Vulkan 1.1
    ANGLE_FEATURE_CONDITION(&mFeatures, supportsExternalFenceCapabilities, true);
    ANGLE_FEATURE_CONDITION(&mFeatures, supportsExternalSemaphoreCapabilities, true);

    // On macOS, there is no native Vulkan driver, so we need to enable the
    // portability enumeration extension to allow use of MoltenVK.
    ANGLE_FEATURE_CONDITION(
        &mFeatures, supportsPortabilityEnumeration,
        ExtensionFound(VK_KHR_PORTABILITY_ENUMERATION_EXTENSION_NAME, instanceExtensionNames));

    ANGLE_FEATURE_CONDITION(&mFeatures, enablePortabilityEnumeration,
                            mFeatures.supportsPortabilityEnumeration.enabled && IsApple());

    // Enable extensions that could be used
    if (useVulkanSwapchain == UseVulkanSwapchain::Yes)
    {
        mEnabledInstanceExtensions.push_back(VK_KHR_SURFACE_EXTENSION_NAME);
        if (ExtensionFound(VK_EXT_SWAPCHAIN_COLOR_SPACE_EXTENSION_NAME, instanceExtensionNames))
        {
            mEnabledInstanceExtensions.push_back(VK_EXT_SWAPCHAIN_COLOR_SPACE_EXTENSION_NAME);
        }

        ANGLE_FEATURE_CONDITION(
            &mFeatures, supportsSurfaceMaintenance1,
            !isMockICDEnabled() && ExtensionFound(VK_EXT_SURFACE_MAINTENANCE_1_EXTENSION_NAME,
                                                  instanceExtensionNames));

        if (mFeatures.supportsSurfaceMaintenance1.enabled)
        {
            mEnabledInstanceExtensions.push_back(VK_EXT_SURFACE_MAINTENANCE_1_EXTENSION_NAME);
        }
    }

    if (wsiExtension)
    {
        mEnabledInstanceExtensions.push_back(wsiExtension);
    }

    // Debug utils are only enabled together with the validation layers, and
    // only when the loader is known to be able to provide the entry points.
    mEnableDebugUtils = canLoadDebugUtils && mEnableValidationLayers &&
                        ExtensionFound(VK_EXT_DEBUG_UTILS_EXTENSION_NAME, instanceExtensionNames);

    if (mEnableDebugUtils)
    {
        mEnabledInstanceExtensions.push_back(VK_EXT_DEBUG_UTILS_EXTENSION_NAME);
    }

    if (mFeatures.supportsSurfaceCapabilities2Extension.enabled)
    {
        mEnabledInstanceExtensions.push_back(VK_KHR_GET_SURFACE_CAPABILITIES_2_EXTENSION_NAME);
    }

    if (mFeatures.supportsSurfaceProtectedCapabilitiesExtension.enabled)
    {
        mEnabledInstanceExtensions.push_back(VK_KHR_SURFACE_PROTECTED_CAPABILITIES_EXTENSION_NAME);
    }

    if (mFeatures.supportsSurfacelessQueryExtension.enabled)
    {
        mEnabledInstanceExtensions.push_back(VK_GOOGLE_SURFACELESS_QUERY_EXTENSION_NAME);
    }

    if (mFeatures.enablePortabilityEnumeration.enabled)
    {
        mEnabledInstanceExtensions.push_back(VK_KHR_PORTABILITY_ENUMERATION_EXTENSION_NAME);
    }

    // Verify the required extensions are in the extension names set. Fail if not.
    std::sort(mEnabledInstanceExtensions.begin(), mEnabledInstanceExtensions.end(), StrLess);
    ANGLE_VK_TRY(context,
                 VerifyExtensionsPresent(instanceExtensionNames, mEnabledInstanceExtensions));

    return angle::Result::Continue;
}
1753 
// Brings up the Vulkan instance and selects the physical device/queue family.
// Sequence: load the loader (shared-libvulkan builds), enumerate layers,
// determine the instance API version, enable instance extensions, create the
// VkInstance (optionally with validation-layer settings and a debug
// messenger), pick a physical device, find a graphics+compute queue family,
// then set up the logical device, queues, format table, one-off command pools
// and a placeholder descriptor set layout.
//
// context:           used for error reporting via ANGLE_VK_TRY/CHECK.
// globalOps:         front-end callbacks (device-lost notification, etc.).
// desiredICD:        which ICD to prefer (e.g. Mock/SwiftShader for testing).
// preferredVendorId/preferredDeviceId: physical device selection hints.
// useValidationLayers: whether to try enabling the validation layers.
// wsiExtension/wsiLayer: platform window-system integration extension/layer.
// nativeWindowSystem: which native window system is in use.
// featureOverrides:  explicit feature force-enable/disable requests.
angle::Result Renderer::initialize(vk::Context *context,
                                   vk::GlobalOps *globalOps,
                                   angle::vk::ICD desiredICD,
                                   uint32_t preferredVendorId,
                                   uint32_t preferredDeviceId,
                                   UseValidationLayers useValidationLayers,
                                   const char *wsiExtension,
                                   const char *wsiLayer,
                                   angle::NativeWindowSystem nativeWindowSystem,
                                   const angle::FeatureOverrides &featureOverrides)
{
    bool canLoadDebugUtils = true;
#if defined(ANGLE_SHARED_LIBVULKAN)
    {
        ANGLE_SCOPED_DISABLE_MSAN();
        mLibVulkanLibrary = angle::vk::OpenLibVulkan();
        ANGLE_VK_CHECK(context, mLibVulkanLibrary, VK_ERROR_INITIALIZATION_FAILED);

        PFN_vkGetInstanceProcAddr vulkanLoaderGetInstanceProcAddr =
            reinterpret_cast<PFN_vkGetInstanceProcAddr>(
                angle::GetLibrarySymbol(mLibVulkanLibrary, "vkGetInstanceProcAddr"));

        // Set all vk* function ptrs
        volkInitializeCustom(vulkanLoaderGetInstanceProcAddr);

        uint32_t ver = volkGetInstanceVersion();
        if (!IsAndroid() && ver < VK_MAKE_VERSION(1, 1, 91))
        {
            // http://crbug.com/1205999 - non-Android Vulkan Loader versions before 1.1.91 have a
            // bug which prevents loading VK_EXT_debug_utils function pointers.
            canLoadDebugUtils = false;
        }
    }
#endif  // defined(ANGLE_SHARED_LIBVULKAN)

    mGlobalOps = globalOps;

    // The scoped environment configures ICD selection / validation layer
    // environment variables for the duration of the enumeration calls.
    angle::vk::ScopedVkLoaderEnvironment scopedEnvironment(
        useValidationLayers != UseValidationLayers::No, desiredICD);
    mEnableValidationLayers = scopedEnvironment.canEnableValidationLayers();
    mEnabledICD             = scopedEnvironment.getEnabledICD();

    // Gather global layer properties.
    uint32_t instanceLayerCount = 0;
    {
        ANGLE_SCOPED_DISABLE_LSAN();
        ANGLE_SCOPED_DISABLE_MSAN();
        ANGLE_VK_TRY(context, vkEnumerateInstanceLayerProperties(&instanceLayerCount, nullptr));
    }

    std::vector<VkLayerProperties> instanceLayerProps(instanceLayerCount);
    if (instanceLayerCount > 0)
    {
        ANGLE_SCOPED_DISABLE_LSAN();
        ANGLE_SCOPED_DISABLE_MSAN();
        ANGLE_VK_TRY(context, vkEnumerateInstanceLayerProperties(&instanceLayerCount,
                                                                 instanceLayerProps.data()));
    }

    VulkanLayerVector enabledInstanceLayerNames;
    if (mEnableValidationLayers)
    {
        // Validation stays enabled only if the layers are actually installed.
        const bool layersRequested = useValidationLayers == UseValidationLayers::Yes;
        mEnableValidationLayers = GetAvailableValidationLayers(instanceLayerProps, layersRequested,
                                                               &enabledInstanceLayerNames);
    }

    if (wsiLayer != nullptr)
    {
        enabledInstanceLayerNames.push_back(wsiLayer);
    }

    // vkEnumerateInstanceVersion does not exist on Vulkan 1.0 loaders; query
    // it dynamically.
    auto enumerateInstanceVersion = reinterpret_cast<PFN_vkEnumerateInstanceVersion>(
        vkGetInstanceProcAddr(nullptr, "vkEnumerateInstanceVersion"));

    uint32_t highestApiVersion = mInstanceVersion = VK_API_VERSION_1_0;
    if (enumerateInstanceVersion)
    {
        {
            ANGLE_SCOPED_DISABLE_LSAN();
            ANGLE_SCOPED_DISABLE_MSAN();
            ANGLE_VK_TRY(context, enumerateInstanceVersion(&mInstanceVersion));
        }

        if (IsVulkan11(mInstanceVersion))
        {
            // This is the highest version of core Vulkan functionality that ANGLE uses.  Per the
            // Vulkan spec, the application is allowed to specify a higher version than supported by
            // the instance.  ANGLE still respects the *device's* version.
            highestApiVersion = kPreferredVulkanAPIVersion;
        }
    }

    if (mInstanceVersion < kMinimumVulkanAPIVersion)
    {
        WARN() << "ANGLE Requires a minimum Vulkan instance version of 1.1";
        ANGLE_VK_TRY(context, VK_ERROR_INCOMPATIBLE_DRIVER);
    }

    const UseVulkanSwapchain useVulkanSwapchain = wsiExtension != nullptr || wsiLayer != nullptr
                                                      ? UseVulkanSwapchain::Yes
                                                      : UseVulkanSwapchain::No;
    ANGLE_TRY(enableInstanceExtensions(context, enabledInstanceLayerNames, wsiExtension,
                                       useVulkanSwapchain, canLoadDebugUtils));

    const std::string appName = angle::GetExecutableName();

    mApplicationInfo                    = {};
    mApplicationInfo.sType              = VK_STRUCTURE_TYPE_APPLICATION_INFO;
    mApplicationInfo.pApplicationName   = appName.c_str();
    mApplicationInfo.applicationVersion = 1;
    mApplicationInfo.pEngineName        = "ANGLE";
    mApplicationInfo.engineVersion      = 1;
    mApplicationInfo.apiVersion         = highestApiVersion;

    VkInstanceCreateInfo instanceInfo = {};
    instanceInfo.sType                = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;
    instanceInfo.flags                = 0;
    instanceInfo.pApplicationInfo     = &mApplicationInfo;

    // Enable requested layers and extensions.
    instanceInfo.enabledExtensionCount = static_cast<uint32_t>(mEnabledInstanceExtensions.size());
    instanceInfo.ppEnabledExtensionNames =
        mEnabledInstanceExtensions.empty() ? nullptr : mEnabledInstanceExtensions.data();

    instanceInfo.enabledLayerCount   = static_cast<uint32_t>(enabledInstanceLayerNames.size());
    instanceInfo.ppEnabledLayerNames = enabledInstanceLayerNames.data();

    // On macOS, there is no native Vulkan driver, so we need to enable the
    // portability enumeration extension to allow use of MoltenVK.
    if (mFeatures.enablePortabilityEnumeration.enabled)
    {
        instanceInfo.flags |= VK_INSTANCE_CREATE_ENUMERATE_PORTABILITY_BIT_KHR;
    }

    // Fine grain control of validation layer features
    const char *name                     = "VK_LAYER_KHRONOS_validation";
    const VkBool32 setting_validate_core = VK_TRUE;
    // SyncVal is very slow (https://github.com/KhronosGroup/Vulkan-ValidationLayers/issues/7285)
    // for VkEvent which causes a few tests fail on the bots. Disable syncVal if VkEvent is enabled
    // for now.
    const VkBool32 setting_validate_sync = IsAndroid() ? VK_FALSE : VK_TRUE;
    const VkBool32 setting_thread_safety = VK_TRUE;
    // http://anglebug.com/7050 - Shader validation caching is broken on Android
    const VkBool32 setting_check_shaders = IsAndroid() ? VK_FALSE : VK_TRUE;
    // http://b/316013423 Disable QueueSubmit Synchronization Validation. Lots of failures and some
    // test timeout due to https://github.com/KhronosGroup/Vulkan-ValidationLayers/issues/7285
    const VkBool32 setting_sync_queue_submit = VK_FALSE;
    const VkLayerSettingEXT layerSettings[]  = {
        {name, "validate_core", VK_LAYER_SETTING_TYPE_BOOL32_EXT, 1, &setting_validate_core},
        {name, "validate_sync", VK_LAYER_SETTING_TYPE_BOOL32_EXT, 1, &setting_validate_sync},
        {name, "thread_safety", VK_LAYER_SETTING_TYPE_BOOL32_EXT, 1, &setting_thread_safety},
        {name, "check_shaders", VK_LAYER_SETTING_TYPE_BOOL32_EXT, 1, &setting_check_shaders},
        {name, "sync_queue_submit", VK_LAYER_SETTING_TYPE_BOOL32_EXT, 1,
         &setting_sync_queue_submit},
    };
    VkLayerSettingsCreateInfoEXT layerSettingsCreateInfo = {
        VK_STRUCTURE_TYPE_LAYER_SETTINGS_CREATE_INFO_EXT, nullptr,
        static_cast<uint32_t>(std::size(layerSettings)), layerSettings};
    if (mEnableValidationLayers)
    {
        vk::AddToPNextChain(&instanceInfo, &layerSettingsCreateInfo);
    }

    {
        ANGLE_SCOPED_DISABLE_MSAN();
        ANGLE_VK_TRY(context, vkCreateInstance(&instanceInfo, nullptr, &mInstance));
#if defined(ANGLE_SHARED_LIBVULKAN)
        // Load volk if we are linking dynamically
        volkLoadInstance(mInstance);
#endif  // defined(ANGLE_SHARED_LIBVULKAN)

        initInstanceExtensionEntryPoints();
    }

    if (mEnableDebugUtils)
    {
        // Use the newer EXT_debug_utils if it exists.
#if !defined(ANGLE_SHARED_LIBVULKAN)
        InitDebugUtilsEXTFunctions(mInstance);
#endif  // !defined(ANGLE_SHARED_LIBVULKAN)

        // Create the messenger callback.
        VkDebugUtilsMessengerCreateInfoEXT messengerInfo = {};

        constexpr VkDebugUtilsMessageSeverityFlagsEXT kSeveritiesToLog =
            VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT |
            VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT;

        constexpr VkDebugUtilsMessageTypeFlagsEXT kMessagesToLog =
            VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT |
            VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT |
            VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT;

        messengerInfo.sType           = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT;
        messengerInfo.messageSeverity = kSeveritiesToLog;
        messengerInfo.messageType     = kMessagesToLog;
        messengerInfo.pfnUserCallback = &DebugUtilsMessenger;
        messengerInfo.pUserData       = this;

        ANGLE_VK_TRY(context, vkCreateDebugUtilsMessengerEXT(mInstance, &messengerInfo, nullptr,
                                                             &mDebugUtilsMessenger));
    }

    uint32_t physicalDeviceCount = 0;
    ANGLE_VK_TRY(context, vkEnumeratePhysicalDevices(mInstance, &physicalDeviceCount, nullptr));
    ANGLE_VK_CHECK(context, physicalDeviceCount > 0, VK_ERROR_INITIALIZATION_FAILED);

    // TODO(jmadill): Handle multiple physical devices. For now, use the first device.
    std::vector<VkPhysicalDevice> physicalDevices(physicalDeviceCount);
    ANGLE_VK_TRY(context, vkEnumeratePhysicalDevices(mInstance, &physicalDeviceCount,
                                                     physicalDevices.data()));
    ChoosePhysicalDevice(vkGetPhysicalDeviceProperties, physicalDevices, mEnabledICD,
                         preferredVendorId, preferredDeviceId, &mPhysicalDevice,
                         &mPhysicalDeviceProperties);

    // The device version that is assumed by ANGLE is the minimum of the actual device version and
    // the highest it's allowed to use.
    mDeviceVersion = std::min(mPhysicalDeviceProperties.apiVersion, highestApiVersion);

    if (mDeviceVersion < kMinimumVulkanAPIVersion)
    {
        WARN() << "ANGLE Requires a minimum Vulkan device version of 1.1";
        ANGLE_VK_TRY(context, VK_ERROR_INCOMPATIBLE_DRIVER);
    }

    // Garbage is flushed before the driver's allocation count limit is hit.
    mGarbageCollectionFlushThreshold =
        static_cast<uint32_t>(mPhysicalDeviceProperties.limits.maxMemoryAllocationCount *
                              kPercentMaxMemoryAllocationCount);
    vkGetPhysicalDeviceFeatures(mPhysicalDevice, &mPhysicalDeviceFeatures);

    // Ensure we can find a graphics queue family.
    uint32_t queueFamilyCount = 0;
    vkGetPhysicalDeviceQueueFamilyProperties(mPhysicalDevice, &queueFamilyCount, nullptr);

    ANGLE_VK_CHECK(context, queueFamilyCount > 0, VK_ERROR_INITIALIZATION_FAILED);

    mQueueFamilyProperties.resize(queueFamilyCount);
    vkGetPhysicalDeviceQueueFamilyProperties(mPhysicalDevice, &queueFamilyCount,
                                             mQueueFamilyProperties.data());

    uint32_t queueFamilyMatchCount = 0;
    // Try first for a protected graphics queue family
    uint32_t firstGraphicsQueueFamily = vk::QueueFamily::FindIndex(
        mQueueFamilyProperties,
        (VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT | VK_QUEUE_PROTECTED_BIT), 0,
        &queueFamilyMatchCount);
    // else just a graphics queue family
    if (queueFamilyMatchCount == 0)
    {
        firstGraphicsQueueFamily = vk::QueueFamily::FindIndex(
            mQueueFamilyProperties, (VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT), 0,
            &queueFamilyMatchCount);
    }
    ANGLE_VK_CHECK(context, queueFamilyMatchCount > 0, VK_ERROR_INITIALIZATION_FAILED);

    // Store the physical device memory properties so we can find the right memory pools.
    mMemoryProperties.init(mPhysicalDevice);
    ANGLE_VK_CHECK(context, mMemoryProperties.getMemoryTypeCount() > 0,
                   VK_ERROR_INITIALIZATION_FAILED);

    // The counters for the memory allocation tracker should be initialized.
    // Each memory allocation could be made in one of the available memory heaps. We initialize the
    // per-heap memory allocation trackers for MemoryAllocationType objects here, after
    // mMemoryProperties has been set up.
    mMemoryAllocationTracker.initMemoryTrackers();

    // Determine the threshold for pending garbage sizes.
    calculatePendingGarbageSizeLimit();

    ANGLE_TRY(
        setupDevice(context, featureOverrides, wsiLayer, useVulkanSwapchain, nativeWindowSystem));

    // If only one queue family, that's the only choice and the device is initialize with that.  If
    // there is more than one queue, we still create the device with the first queue family and hope
    // for the best.  We cannot wait for a window surface to know which supports present because of
    // EGL_KHR_surfaceless_context or simply pbuffers.  So far, only MoltenVk seems to expose
    // multiple queue families, and using the first queue family is fine with it.
    ANGLE_TRY(createDeviceAndQueue(context, firstGraphicsQueueFamily));

    // Initialize the format table.
    mFormatTable.initialize(this, &mNativeTextureCaps);

    // Null terminate the extension list returned for EGL_VULKAN_INSTANCE_EXTENSIONS_ANGLE.
    mEnabledInstanceExtensions.push_back(nullptr);

    for (vk::ProtectionType protectionType : angle::AllEnums<vk::ProtectionType>())
    {
        mOneOffCommandPoolMap[protectionType].init(protectionType);
    }

    // Initialize place holder descriptor set layout for empty DescriptorSetLayoutDesc
    ASSERT(mPlaceHolderDescriptorSetLayout == nullptr);
    VkDescriptorSetLayoutCreateInfo createInfo = {};
    createInfo.sType        = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
    createInfo.flags        = 0;
    createInfo.bindingCount = 0;
    createInfo.pBindings    = nullptr;

    vk::DescriptorSetLayout newLayout;
    ANGLE_VK_TRY(context, newLayout.init(context->getDevice(), createInfo));

    mPlaceHolderDescriptorSetLayout = new vk::RefCountedDescriptorSetLayout(std::move(newLayout));
    ASSERT(mPlaceHolderDescriptorSetLayout && mPlaceHolderDescriptorSetLayout->get().valid());

    return angle::Result::Continue;
}
2061 
initializeMemoryAllocator(vk::Context * context)2062 angle::Result Renderer::initializeMemoryAllocator(vk::Context *context)
2063 {
2064     // This number matches Chromium and was picked by looking at memory usage of
2065     // Android apps. The allocator will start making blocks at 1/8 the max size
2066     // and builds up block size as needed before capping at the max set here.
2067     mPreferredLargeHeapBlockSize = 4 * 1024 * 1024;
2068 
2069     // Create VMA allocator
2070     ANGLE_VK_TRY(context,
2071                  mAllocator.init(mPhysicalDevice, mDevice, mInstance, mApplicationInfo.apiVersion,
2072                                  mPreferredLargeHeapBlockSize));
2073 
2074     // Figure out the alignment for default buffer allocations
2075     VkBufferCreateInfo createInfo    = {};
2076     createInfo.sType                 = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
2077     createInfo.flags                 = 0;
2078     createInfo.size                  = 4096;
2079     createInfo.usage                 = GetDefaultBufferUsageFlags(this);
2080     createInfo.sharingMode           = VK_SHARING_MODE_EXCLUSIVE;
2081     createInfo.queueFamilyIndexCount = 0;
2082     createInfo.pQueueFamilyIndices   = nullptr;
2083 
2084     vk::DeviceScoped<vk::Buffer> tempBuffer(mDevice);
2085     tempBuffer.get().init(mDevice, createInfo);
2086 
2087     VkMemoryRequirements defaultBufferMemoryRequirements;
2088     tempBuffer.get().getMemoryRequirements(mDevice, &defaultBufferMemoryRequirements);
2089     ASSERT(gl::isPow2(defaultBufferMemoryRequirements.alignment));
2090 
2091     const VkPhysicalDeviceLimits &limitsVk = getPhysicalDeviceProperties().limits;
2092     ASSERT(gl::isPow2(limitsVk.minUniformBufferOffsetAlignment));
2093     ASSERT(gl::isPow2(limitsVk.minStorageBufferOffsetAlignment));
2094     ASSERT(gl::isPow2(limitsVk.minTexelBufferOffsetAlignment));
2095     ASSERT(gl::isPow2(limitsVk.minMemoryMapAlignment));
2096 
2097     mDefaultBufferAlignment =
2098         std::max({static_cast<size_t>(limitsVk.minUniformBufferOffsetAlignment),
2099                   static_cast<size_t>(limitsVk.minStorageBufferOffsetAlignment),
2100                   static_cast<size_t>(limitsVk.minTexelBufferOffsetAlignment),
2101                   static_cast<size_t>(limitsVk.minMemoryMapAlignment),
2102                   static_cast<size_t>(defaultBufferMemoryRequirements.alignment)});
2103 
2104     // Initialize staging buffer memory type index and alignment.
2105     // These buffers will only be used as transfer sources or transfer targets.
2106     createInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
2107     VkMemoryPropertyFlags requiredFlags, preferredFlags;
2108     bool persistentlyMapped = mFeatures.persistentlyMappedBuffers.enabled;
2109 
2110     // Uncached coherent staging buffer.
2111     requiredFlags  = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
2112     preferredFlags = VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
2113     ANGLE_VK_TRY(context,
2114                  mAllocator.findMemoryTypeIndexForBufferInfo(
2115                      createInfo, requiredFlags, preferredFlags, persistentlyMapped,
2116                      &mStagingBufferMemoryTypeIndex[vk::MemoryCoherency::UnCachedCoherent]));
2117     ASSERT(mStagingBufferMemoryTypeIndex[vk::MemoryCoherency::UnCachedCoherent] !=
2118            kInvalidMemoryTypeIndex);
2119 
2120     // Cached coherent staging buffer.  Note coherent is preferred but not required, which means we
2121     // may get non-coherent memory type.
2122     requiredFlags  = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
2123     preferredFlags = VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
2124     ANGLE_VK_TRY(context,
2125                  mAllocator.findMemoryTypeIndexForBufferInfo(
2126                      createInfo, requiredFlags, preferredFlags, persistentlyMapped,
2127                      &mStagingBufferMemoryTypeIndex[vk::MemoryCoherency::CachedPreferCoherent]));
2128     ASSERT(mStagingBufferMemoryTypeIndex[vk::MemoryCoherency::CachedPreferCoherent] !=
2129            kInvalidMemoryTypeIndex);
2130 
2131     // Cached Non-coherent staging buffer
2132     requiredFlags  = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
2133     preferredFlags = 0;
2134     ANGLE_VK_TRY(context,
2135                  mAllocator.findMemoryTypeIndexForBufferInfo(
2136                      createInfo, requiredFlags, preferredFlags, persistentlyMapped,
2137                      &mStagingBufferMemoryTypeIndex[vk::MemoryCoherency::CachedNonCoherent]));
2138     ASSERT(mStagingBufferMemoryTypeIndex[vk::MemoryCoherency::CachedNonCoherent] !=
2139            kInvalidMemoryTypeIndex);
2140 
2141     // Alignment
2142     mStagingBufferAlignment =
2143         static_cast<size_t>(mPhysicalDeviceProperties.limits.minMemoryMapAlignment);
2144     ASSERT(gl::isPow2(mPhysicalDeviceProperties.limits.nonCoherentAtomSize));
2145     ASSERT(gl::isPow2(mPhysicalDeviceProperties.limits.optimalBufferCopyOffsetAlignment));
2146     // Usually minTexelBufferOffsetAlignment is much smaller than  nonCoherentAtomSize
2147     ASSERT(gl::isPow2(mPhysicalDeviceProperties.limits.minTexelBufferOffsetAlignment));
2148     mStagingBufferAlignment = std::max(
2149         {mStagingBufferAlignment,
2150          static_cast<size_t>(mPhysicalDeviceProperties.limits.optimalBufferCopyOffsetAlignment),
2151          static_cast<size_t>(mPhysicalDeviceProperties.limits.nonCoherentAtomSize),
2152          static_cast<size_t>(mPhysicalDeviceProperties.limits.minTexelBufferOffsetAlignment)});
2153     ASSERT(gl::isPow2(mStagingBufferAlignment));
2154 
2155     // Device local vertex conversion buffer
2156     createInfo.usage = vk::kVertexBufferUsageFlags;
2157     requiredFlags    = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
2158     preferredFlags   = 0;
2159     ANGLE_VK_TRY(context, mAllocator.findMemoryTypeIndexForBufferInfo(
2160                               createInfo, requiredFlags, preferredFlags, persistentlyMapped,
2161                               &mDeviceLocalVertexConversionBufferMemoryTypeIndex));
2162     ASSERT(mDeviceLocalVertexConversionBufferMemoryTypeIndex != kInvalidMemoryTypeIndex);
2163 
2164     // Host visible and non-coherent vertex conversion buffer, which is the same as non-coherent
2165     // staging buffer
2166     mHostVisibleVertexConversionBufferMemoryTypeIndex =
2167         mStagingBufferMemoryTypeIndex[vk::MemoryCoherency::CachedNonCoherent];
2168 
2169     // We may use compute shader to do conversion, so we must meet
2170     // minStorageBufferOffsetAlignment requirement as well. Also take into account non-coherent
2171     // alignment requirements.
2172     mVertexConversionBufferAlignment = std::max(
2173         {vk::kVertexBufferAlignment,
2174          static_cast<size_t>(mPhysicalDeviceProperties.limits.minStorageBufferOffsetAlignment),
2175          static_cast<size_t>(mPhysicalDeviceProperties.limits.nonCoherentAtomSize),
2176          static_cast<size_t>(defaultBufferMemoryRequirements.alignment)});
2177     ASSERT(gl::isPow2(mVertexConversionBufferAlignment));
2178 
2179     return angle::Result::Continue;
2180 }
2181 
2182 // The following features and properties are not promoted to any core Vulkan versions (up to Vulkan
2183 // 1.3):
2184 //
2185 // - VK_EXT_line_rasterization:                        bresenhamLines (feature)
2186 // - VK_EXT_provoking_vertex:                          provokingVertexLast (feature)
2187 // - VK_EXT_vertex_attribute_divisor:                  vertexAttributeInstanceRateDivisor (feature),
2188 //                                                     maxVertexAttribDivisor (property)
2189 // - VK_EXT_transform_feedback:                        transformFeedback (feature),
2190 //                                                     geometryStreams (feature)
2191 // - VK_EXT_index_type_uint8:                          indexTypeUint8 (feature)
2192 // - VK_EXT_device_memory_report:                      deviceMemoryReport (feature)
2193 // - VK_EXT_multisampled_render_to_single_sampled:     multisampledRenderToSingleSampled (feature)
2194 // - VK_EXT_image_2d_view_of_3d:                       image2DViewOf3D (feature)
2195 //                                                     sampler2DViewOf3D (feature)
2196 // - VK_EXT_custom_border_color:                       customBorderColors (feature)
2197 //                                                     customBorderColorWithoutFormat (feature)
2198 // - VK_EXT_depth_clamp_zero_one:                      depthClampZeroOne (feature)
2199 // - VK_EXT_depth_clip_control:                        depthClipControl (feature)
2200 // - VK_EXT_primitives_generated_query:                primitivesGeneratedQuery (feature),
2201 //                                                     primitivesGeneratedQueryWithRasterizerDiscard
2202 //                                                                                        (property)
2203 // - VK_EXT_primitive_topology_list_restart:           primitiveTopologyListRestart (feature)
2204 // - VK_EXT_graphics_pipeline_library:                 graphicsPipelineLibrary (feature),
2205 //                                                     graphicsPipelineLibraryFastLinking (property)
2206 // - VK_KHR_fragment_shading_rate:                     pipelineFragmentShadingRate (feature)
2207 // - VK_EXT_fragment_shader_interlock:                 fragmentShaderPixelInterlock (feature)
2208 // - VK_EXT_pipeline_robustness:                       pipelineRobustness (feature)
2209 // - VK_EXT_pipeline_protected_access:                 pipelineProtectedAccess (feature)
2210 // - VK_EXT_rasterization_order_attachment_access or
2211 //   VK_ARM_rasterization_order_attachment_access:     rasterizationOrderColorAttachmentAccess
2212 //                                                                                   (feature)
2213 // - VK_EXT_swapchain_maintenance1:                    swapchainMaintenance1 (feature)
2214 // - VK_EXT_legacy_dithering:                          supportsLegacyDithering (feature)
2215 // - VK_EXT_physical_device_drm:                       hasPrimary (property),
2216 //                                                     hasRender (property)
2217 // - VK_EXT_host_image_copy:                           hostImageCopy (feature),
2218 //                                                     pCopySrcLayouts (property),
2219 //                                                     pCopyDstLayouts (property),
2220 //                                                     identicalMemoryTypeRequirements (property)
2221 // - VK_ANDROID_external_format_resolve:               externalFormatResolve (feature)
2222 // - VK_EXT_vertex_input_dynamic_state:                vertexInputDynamicState (feature)
2223 // - VK_KHR_dynamic_rendering_local_read:              dynamicRenderingLocalRead (feature)
2224 //
appendDeviceExtensionFeaturesNotPromoted(const vk::ExtensionNameList & deviceExtensionNames,VkPhysicalDeviceFeatures2KHR * deviceFeatures,VkPhysicalDeviceProperties2 * deviceProperties)2225 void Renderer::appendDeviceExtensionFeaturesNotPromoted(
2226     const vk::ExtensionNameList &deviceExtensionNames,
2227     VkPhysicalDeviceFeatures2KHR *deviceFeatures,
2228     VkPhysicalDeviceProperties2 *deviceProperties)
2229 {
2230     if (ExtensionFound(VK_EXT_LINE_RASTERIZATION_EXTENSION_NAME, deviceExtensionNames))
2231     {
2232         vk::AddToPNextChain(deviceFeatures, &mLineRasterizationFeatures);
2233     }
2234 
2235     if (ExtensionFound(VK_EXT_PROVOKING_VERTEX_EXTENSION_NAME, deviceExtensionNames))
2236     {
2237         vk::AddToPNextChain(deviceFeatures, &mProvokingVertexFeatures);
2238     }
2239 
2240     if (ExtensionFound(VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_EXTENSION_NAME, deviceExtensionNames))
2241     {
2242         vk::AddToPNextChain(deviceFeatures, &mVertexAttributeDivisorFeatures);
2243         vk::AddToPNextChain(deviceProperties, &mVertexAttributeDivisorProperties);
2244     }
2245 
2246     if (ExtensionFound(VK_EXT_TRANSFORM_FEEDBACK_EXTENSION_NAME, deviceExtensionNames))
2247     {
2248         vk::AddToPNextChain(deviceFeatures, &mTransformFeedbackFeatures);
2249     }
2250 
2251     if (ExtensionFound(VK_EXT_INDEX_TYPE_UINT8_EXTENSION_NAME, deviceExtensionNames))
2252     {
2253         vk::AddToPNextChain(deviceFeatures, &mIndexTypeUint8Features);
2254     }
2255 
2256     if (ExtensionFound(VK_EXT_DEVICE_MEMORY_REPORT_EXTENSION_NAME, deviceExtensionNames))
2257     {
2258         vk::AddToPNextChain(deviceFeatures, &mMemoryReportFeatures);
2259     }
2260 
2261     if (ExtensionFound(VK_EXT_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_EXTENSION_NAME,
2262                        deviceExtensionNames))
2263     {
2264         vk::AddToPNextChain(deviceFeatures, &mMultisampledRenderToSingleSampledFeatures);
2265     }
2266 
2267     if (ExtensionFound(VK_EXT_IMAGE_2D_VIEW_OF_3D_EXTENSION_NAME, deviceExtensionNames))
2268     {
2269         vk::AddToPNextChain(deviceFeatures, &mImage2dViewOf3dFeatures);
2270     }
2271 
2272     if (ExtensionFound(VK_EXT_CUSTOM_BORDER_COLOR_EXTENSION_NAME, deviceExtensionNames))
2273     {
2274         vk::AddToPNextChain(deviceFeatures, &mCustomBorderColorFeatures);
2275     }
2276 
2277     if (ExtensionFound(VK_EXT_DEPTH_CLAMP_ZERO_ONE_EXTENSION_NAME, deviceExtensionNames))
2278     {
2279         vk::AddToPNextChain(deviceFeatures, &mDepthClampZeroOneFeatures);
2280     }
2281 
2282     if (ExtensionFound(VK_EXT_DEPTH_CLIP_CONTROL_EXTENSION_NAME, deviceExtensionNames))
2283     {
2284         vk::AddToPNextChain(deviceFeatures, &mDepthClipControlFeatures);
2285     }
2286 
2287     if (ExtensionFound(VK_EXT_PRIMITIVES_GENERATED_QUERY_EXTENSION_NAME, deviceExtensionNames))
2288     {
2289         vk::AddToPNextChain(deviceFeatures, &mPrimitivesGeneratedQueryFeatures);
2290     }
2291 
2292     if (ExtensionFound(VK_EXT_PRIMITIVE_TOPOLOGY_LIST_RESTART_EXTENSION_NAME, deviceExtensionNames))
2293     {
2294         vk::AddToPNextChain(deviceFeatures, &mPrimitiveTopologyListRestartFeatures);
2295     }
2296 
2297     if (ExtensionFound(VK_EXT_GRAPHICS_PIPELINE_LIBRARY_EXTENSION_NAME, deviceExtensionNames))
2298     {
2299         vk::AddToPNextChain(deviceFeatures, &mGraphicsPipelineLibraryFeatures);
2300         vk::AddToPNextChain(deviceProperties, &mGraphicsPipelineLibraryProperties);
2301     }
2302 
2303     if (ExtensionFound(VK_KHR_FRAGMENT_SHADING_RATE_EXTENSION_NAME, deviceExtensionNames))
2304     {
2305         vk::AddToPNextChain(deviceFeatures, &mFragmentShadingRateFeatures);
2306         vk::AddToPNextChain(deviceProperties, &mFragmentShadingRateProperties);
2307     }
2308 
2309     if (ExtensionFound(VK_EXT_FRAGMENT_SHADER_INTERLOCK_EXTENSION_NAME, deviceExtensionNames))
2310     {
2311         vk::AddToPNextChain(deviceFeatures, &mFragmentShaderInterlockFeatures);
2312     }
2313 
2314     if (ExtensionFound(VK_EXT_PIPELINE_ROBUSTNESS_EXTENSION_NAME, deviceExtensionNames))
2315     {
2316         vk::AddToPNextChain(deviceFeatures, &mPipelineRobustnessFeatures);
2317     }
2318 
2319     if (ExtensionFound(VK_EXT_PIPELINE_PROTECTED_ACCESS_EXTENSION_NAME, deviceExtensionNames))
2320     {
2321         vk::AddToPNextChain(deviceFeatures, &mPipelineProtectedAccessFeatures);
2322     }
2323 
2324     // The EXT and ARM versions are interchangeable. The structs and enums alias each other.
2325     if (ExtensionFound(VK_EXT_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_EXTENSION_NAME,
2326                        deviceExtensionNames))
2327     {
2328         vk::AddToPNextChain(deviceFeatures, &mRasterizationOrderAttachmentAccessFeatures);
2329     }
2330     else if (ExtensionFound(VK_ARM_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_EXTENSION_NAME,
2331                             deviceExtensionNames))
2332     {
2333         vk::AddToPNextChain(deviceFeatures, &mRasterizationOrderAttachmentAccessFeatures);
2334     }
2335 
2336     if (ExtensionFound(VK_EXT_SWAPCHAIN_MAINTENANCE_1_EXTENSION_NAME, deviceExtensionNames))
2337     {
2338         vk::AddToPNextChain(deviceFeatures, &mSwapchainMaintenance1Features);
2339     }
2340 
2341     if (ExtensionFound(VK_EXT_LEGACY_DITHERING_EXTENSION_NAME, deviceExtensionNames))
2342     {
2343         vk::AddToPNextChain(deviceFeatures, &mDitheringFeatures);
2344     }
2345 
2346     if (ExtensionFound(VK_EXT_PHYSICAL_DEVICE_DRM_EXTENSION_NAME, deviceExtensionNames))
2347     {
2348         vk::AddToPNextChain(deviceProperties, &mDrmProperties);
2349     }
2350 
2351     if (ExtensionFound(VK_EXT_HOST_IMAGE_COPY_EXTENSION_NAME, deviceExtensionNames))
2352     {
2353         // VkPhysicalDeviceHostImageCopyPropertiesEXT has a count + array query.  Typically, that
2354         // requires getting the properties once with a nullptr array, to get the count, and then
2355         // again with an array of that size.  For simplicity, ANGLE just uses an array that's big
2356         // enough.  If that array goes terribly large in the future, ANGLE may lose knowledge of
2357         // some likely esoteric layouts, which doesn't really matter.
2358         constexpr uint32_t kMaxLayoutCount = 50;
2359         mHostImageCopySrcLayoutsStorage.resize(kMaxLayoutCount, VK_IMAGE_LAYOUT_UNDEFINED);
2360         mHostImageCopyDstLayoutsStorage.resize(kMaxLayoutCount, VK_IMAGE_LAYOUT_UNDEFINED);
2361         mHostImageCopyProperties.copySrcLayoutCount = kMaxLayoutCount;
2362         mHostImageCopyProperties.copyDstLayoutCount = kMaxLayoutCount;
2363         mHostImageCopyProperties.pCopySrcLayouts    = mHostImageCopySrcLayoutsStorage.data();
2364         mHostImageCopyProperties.pCopyDstLayouts    = mHostImageCopyDstLayoutsStorage.data();
2365 
2366         vk::AddToPNextChain(deviceFeatures, &mHostImageCopyFeatures);
2367         vk::AddToPNextChain(deviceProperties, &mHostImageCopyProperties);
2368     }
2369 
2370     if (ExtensionFound(VK_EXT_VERTEX_INPUT_DYNAMIC_STATE_EXTENSION_NAME, deviceExtensionNames))
2371     {
2372         vk::AddToPNextChain(deviceFeatures, &mVertexInputDynamicStateFeatures);
2373     }
2374 
2375 #if defined(ANGLE_PLATFORM_ANDROID)
2376     if (ExtensionFound(VK_ANDROID_EXTERNAL_FORMAT_RESOLVE_EXTENSION_NAME, deviceExtensionNames))
2377     {
2378         vk::AddToPNextChain(deviceFeatures, &mExternalFormatResolveFeatures);
2379         vk::AddToPNextChain(deviceProperties, &mExternalFormatResolveProperties);
2380     }
2381 #endif
2382 
2383     if (ExtensionFound(VK_KHR_DYNAMIC_RENDERING_LOCAL_READ_EXTENSION_NAME, deviceExtensionNames))
2384     {
2385         vk::AddToPNextChain(deviceFeatures, &mDynamicRenderingLocalReadFeatures);
2386     }
2387 }
2388 
2389 // The following features and properties used by ANGLE have been promoted to Vulkan 1.1:
2390 //
2391 // - (unpublished VK_KHR_subgroup):         supportedStages (property),
2392 //                                          supportedOperations (property)
2393 // - (unpublished VK_KHR_protected_memory): protectedMemory (feature)
2394 // - VK_KHR_sampler_ycbcr_conversion:       samplerYcbcrConversion (feature)
2395 // - VK_KHR_multiview:                      multiview (feature),
2396 //                                          maxMultiviewViewCount (property)
2397 // - VK_KHR_16bit_storage                   storageBuffer16BitAccess (feature)
2398 //                                          uniformAndStorageBuffer16BitAccess (feature)
2399 //                                          storagePushConstant16 (feature)
2400 //                                          storageInputOutput16 (feature)
2401 //
2402 //
2403 // Note that subgroup and protected memory features and properties came from unpublished extensions
2404 // and are core in Vulkan 1.1.
2405 //
appendDeviceExtensionFeaturesPromotedTo11(const vk::ExtensionNameList & deviceExtensionNames,VkPhysicalDeviceFeatures2KHR * deviceFeatures,VkPhysicalDeviceProperties2 * deviceProperties)2406 void Renderer::appendDeviceExtensionFeaturesPromotedTo11(
2407     const vk::ExtensionNameList &deviceExtensionNames,
2408     VkPhysicalDeviceFeatures2KHR *deviceFeatures,
2409     VkPhysicalDeviceProperties2 *deviceProperties)
2410 {
2411     vk::AddToPNextChain(deviceProperties, &mSubgroupProperties);
2412     vk::AddToPNextChain(deviceFeatures, &mProtectedMemoryFeatures);
2413 
2414     if (ExtensionFound(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME, deviceExtensionNames))
2415     {
2416         vk::AddToPNextChain(deviceFeatures, &mSamplerYcbcrConversionFeatures);
2417     }
2418 
2419     if (ExtensionFound(VK_KHR_MULTIVIEW_EXTENSION_NAME, deviceExtensionNames))
2420     {
2421         vk::AddToPNextChain(deviceFeatures, &mMultiviewFeatures);
2422         vk::AddToPNextChain(deviceProperties, &mMultiviewProperties);
2423     }
2424     if (ExtensionFound(VK_KHR_16BIT_STORAGE_EXTENSION_NAME, deviceExtensionNames))
2425     {
2426         vk::AddToPNextChain(deviceFeatures, &m16BitStorageFeatures);
2427     }
2428 }
2429 
2430 // The following features and properties used by ANGLE have been promoted to Vulkan 1.2:
2431 //
2432 // - VK_KHR_shader_float16_int8:            shaderFloat16 (feature)
2433 // - VK_KHR_depth_stencil_resolve:          supportedDepthResolveModes (property),
2434 //                                          independentResolveNone (property)
2435 // - VK_KHR_driver_properties:              driverName (property),
2436 //                                          driverID (property)
2437 // - VK_KHR_shader_subgroup_extended_types: shaderSubgroupExtendedTypes (feature)
2438 // - VK_EXT_host_query_reset:               hostQueryReset (feature)
2439 // - VK_KHR_imageless_framebuffer:          imagelessFramebuffer (feature)
2440 // - VK_KHR_timeline_semaphore:             timelineSemaphore (feature)
2441 // - VK_KHR_8bit_storage                    storageBuffer8BitAccess (feature)
2442 //                                          uniformAndStorageBuffer8BitAccess (feature)
2443 //                                          storagePushConstant8 (feature)
2444 //
2445 // Note that supportedDepthResolveModes is used just to check if the property struct is populated.
2446 // ANGLE always uses VK_RESOLVE_MODE_SAMPLE_ZERO_BIT for both depth and stencil, and support for
2447 // this bit is mandatory as long as the extension (or Vulkan 1.2) exists.
2448 //
appendDeviceExtensionFeaturesPromotedTo12(const vk::ExtensionNameList & deviceExtensionNames,VkPhysicalDeviceFeatures2KHR * deviceFeatures,VkPhysicalDeviceProperties2 * deviceProperties)2449 void Renderer::appendDeviceExtensionFeaturesPromotedTo12(
2450     const vk::ExtensionNameList &deviceExtensionNames,
2451     VkPhysicalDeviceFeatures2KHR *deviceFeatures,
2452     VkPhysicalDeviceProperties2 *deviceProperties)
2453 {
2454     if (ExtensionFound(VK_KHR_SHADER_FLOAT16_INT8_EXTENSION_NAME, deviceExtensionNames))
2455     {
2456         vk::AddToPNextChain(deviceFeatures, &mShaderFloat16Int8Features);
2457     }
2458 
2459     if (ExtensionFound(VK_KHR_DEPTH_STENCIL_RESOLVE_EXTENSION_NAME, deviceExtensionNames))
2460     {
2461         vk::AddToPNextChain(deviceProperties, &mDepthStencilResolveProperties);
2462     }
2463 
2464     if (ExtensionFound(VK_KHR_DRIVER_PROPERTIES_EXTENSION_NAME, deviceExtensionNames))
2465     {
2466         vk::AddToPNextChain(deviceProperties, &mDriverProperties);
2467     }
2468 
2469     if (ExtensionFound(VK_KHR_SHADER_SUBGROUP_EXTENDED_TYPES_EXTENSION_NAME, deviceExtensionNames))
2470     {
2471         vk::AddToPNextChain(deviceFeatures, &mSubgroupExtendedTypesFeatures);
2472     }
2473 
2474     if (ExtensionFound(VK_EXT_HOST_QUERY_RESET_EXTENSION_NAME, deviceExtensionNames))
2475     {
2476         vk::AddToPNextChain(deviceFeatures, &mHostQueryResetFeatures);
2477     }
2478 
2479     if (ExtensionFound(VK_KHR_IMAGELESS_FRAMEBUFFER_EXTENSION_NAME, deviceExtensionNames))
2480     {
2481         vk::AddToPNextChain(deviceFeatures, &mImagelessFramebufferFeatures);
2482     }
2483 
2484     if (ExtensionFound(VK_KHR_TIMELINE_SEMAPHORE_EXTENSION_NAME, deviceExtensionNames))
2485     {
2486         vk::AddToPNextChain(deviceFeatures, &mTimelineSemaphoreFeatures);
2487     }
2488 
2489     if (ExtensionFound(VK_KHR_8BIT_STORAGE_EXTENSION_NAME, deviceExtensionNames))
2490     {
2491         vk::AddToPNextChain(deviceFeatures, &m8BitStorageFeatures);
2492     }
2493 }
2494 
2495 // The following features and properties used by ANGLE have been promoted to Vulkan 1.3:
2496 //
2497 // - VK_EXT_extended_dynamic_state:          extendedDynamicState (feature)
2498 // - VK_EXT_extended_dynamic_state2:         extendedDynamicState2 (feature),
2499 //                                           extendedDynamicState2LogicOp (feature)
2500 // - VK_KHR_synchronization2:                synchronization2 (feature)
2501 // - VK_KHR_dynamic_rendering:               dynamicRendering (feature)
2502 //
2503 // Note that VK_EXT_extended_dynamic_state2 is partially promoted to Vulkan 1.3.  If ANGLE creates a
2504 // Vulkan 1.3 device, it would still need to enable this extension separately for
2505 // extendedDynamicState2LogicOp.
2506 //
appendDeviceExtensionFeaturesPromotedTo13(const vk::ExtensionNameList & deviceExtensionNames,VkPhysicalDeviceFeatures2KHR * deviceFeatures,VkPhysicalDeviceProperties2 * deviceProperties)2507 void Renderer::appendDeviceExtensionFeaturesPromotedTo13(
2508     const vk::ExtensionNameList &deviceExtensionNames,
2509     VkPhysicalDeviceFeatures2KHR *deviceFeatures,
2510     VkPhysicalDeviceProperties2 *deviceProperties)
2511 {
2512     if (ExtensionFound(VK_EXT_EXTENDED_DYNAMIC_STATE_EXTENSION_NAME, deviceExtensionNames))
2513     {
2514         vk::AddToPNextChain(deviceFeatures, &mExtendedDynamicStateFeatures);
2515     }
2516 
2517     if (ExtensionFound(VK_EXT_EXTENDED_DYNAMIC_STATE_2_EXTENSION_NAME, deviceExtensionNames))
2518     {
2519         vk::AddToPNextChain(deviceFeatures, &mExtendedDynamicState2Features);
2520     }
2521 
2522     if (ExtensionFound(VK_KHR_SYNCHRONIZATION_2_EXTENSION_NAME, deviceExtensionNames))
2523     {
2524         vk::AddToPNextChain(deviceFeatures, &mSynchronization2Features);
2525     }
2526 
2527     if (ExtensionFound(VK_KHR_DYNAMIC_RENDERING_EXTENSION_NAME, deviceExtensionNames))
2528     {
2529         vk::AddToPNextChain(deviceFeatures, &mDynamicRenderingFeatures);
2530     }
2531 }
2532 
queryDeviceExtensionFeatures(const vk::ExtensionNameList & deviceExtensionNames)2533 void Renderer::queryDeviceExtensionFeatures(const vk::ExtensionNameList &deviceExtensionNames)
2534 {
2535     // Default initialize all extension features to false.
2536     mPhysicalDevice11Properties       = {};
2537     mPhysicalDevice11Properties.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_PROPERTIES;
2538 
2539     mPhysicalDevice11Features       = {};
2540     mPhysicalDevice11Features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_FEATURES;
2541 
2542     mLineRasterizationFeatures = {};
2543     mLineRasterizationFeatures.sType =
2544         VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT;
2545 
2546     mProvokingVertexFeatures = {};
2547     mProvokingVertexFeatures.sType =
2548         VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_FEATURES_EXT;
2549 
2550     mVertexAttributeDivisorFeatures = {};
2551     mVertexAttributeDivisorFeatures.sType =
2552         VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT;
2553 
2554     mVertexAttributeDivisorProperties = {};
2555     mVertexAttributeDivisorProperties.sType =
2556         VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT;
2557 
2558     mTransformFeedbackFeatures = {};
2559     mTransformFeedbackFeatures.sType =
2560         VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT;
2561 
2562     mIndexTypeUint8Features       = {};
2563     mIndexTypeUint8Features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT;
2564 
2565     mSubgroupProperties       = {};
2566     mSubgroupProperties.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES;
2567 
2568     mSubgroupExtendedTypesFeatures = {};
2569     mSubgroupExtendedTypesFeatures.sType =
2570         VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES;
2571 
2572     mMemoryReportFeatures = {};
2573     mMemoryReportFeatures.sType =
2574         VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_MEMORY_REPORT_FEATURES_EXT;
2575 
2576     mShaderFloat16Int8Features = {};
2577     mShaderFloat16Int8Features.sType =
2578         VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES;
2579 
2580     mDepthStencilResolveProperties = {};
2581     mDepthStencilResolveProperties.sType =
2582         VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES;
2583 
2584     mCustomBorderColorFeatures = {};
2585     mCustomBorderColorFeatures.sType =
2586         VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_FEATURES_EXT;
2587 
2588     mMultisampledRenderToSingleSampledFeatures = {};
2589     mMultisampledRenderToSingleSampledFeatures.sType =
2590         VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_FEATURES_EXT;
2591 
2592     mImage2dViewOf3dFeatures = {};
2593     mImage2dViewOf3dFeatures.sType =
2594         VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_2D_VIEW_OF_3D_FEATURES_EXT;
2595 
2596     mMultiviewFeatures       = {};
2597     mMultiviewFeatures.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES;
2598 
2599     mMultiviewProperties       = {};
2600     mMultiviewProperties.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES;
2601 
2602     mDriverProperties       = {};
2603     mDriverProperties.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES;
2604 
2605     mSamplerYcbcrConversionFeatures = {};
2606     mSamplerYcbcrConversionFeatures.sType =
2607         VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES;
2608 
2609     mProtectedMemoryFeatures       = {};
2610     mProtectedMemoryFeatures.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES;
2611 
2612     mHostQueryResetFeatures       = {};
2613     mHostQueryResetFeatures.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES_EXT;
2614 
2615     mDepthClampZeroOneFeatures = {};
2616     mDepthClampZeroOneFeatures.sType =
2617         VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLAMP_ZERO_ONE_FEATURES_EXT;
2618 
2619     mDepthClipControlFeatures = {};
2620     mDepthClipControlFeatures.sType =
2621         VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_CONTROL_FEATURES_EXT;
2622 
2623     mPrimitivesGeneratedQueryFeatures = {};
2624     mPrimitivesGeneratedQueryFeatures.sType =
2625         VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIMITIVES_GENERATED_QUERY_FEATURES_EXT;
2626 
2627     mPrimitiveTopologyListRestartFeatures = {};
2628     mPrimitiveTopologyListRestartFeatures.sType =
2629         VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIMITIVE_TOPOLOGY_LIST_RESTART_FEATURES_EXT;
2630 
2631     mExtendedDynamicStateFeatures = {};
2632     mExtendedDynamicStateFeatures.sType =
2633         VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_FEATURES_EXT;
2634 
2635     mExtendedDynamicState2Features = {};
2636     mExtendedDynamicState2Features.sType =
2637         VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_2_FEATURES_EXT;
2638 
2639     mGraphicsPipelineLibraryFeatures = {};
2640     mGraphicsPipelineLibraryFeatures.sType =
2641         VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GRAPHICS_PIPELINE_LIBRARY_FEATURES_EXT;
2642 
2643     mGraphicsPipelineLibraryProperties = {};
2644     mGraphicsPipelineLibraryProperties.sType =
2645         VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GRAPHICS_PIPELINE_LIBRARY_PROPERTIES_EXT;
2646 
2647     mVertexInputDynamicStateFeatures = {};
2648     mVertexInputDynamicStateFeatures.sType =
2649         VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_INPUT_DYNAMIC_STATE_FEATURES_EXT;
2650 
2651     mDynamicRenderingFeatures = {};
2652     mDynamicRenderingFeatures.sType =
2653         VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_FEATURES_KHR;
2654 
2655     mDynamicRenderingLocalReadFeatures = {};
2656     mDynamicRenderingLocalReadFeatures.sType =
2657         VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_LOCAL_READ_FEATURES_KHR;
2658 
2659     mFragmentShadingRateFeatures = {};
2660     mFragmentShadingRateFeatures.sType =
2661         VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_FEATURES_KHR;
2662 
2663     mFragmentShadingRateProperties = {};
2664     mFragmentShadingRateProperties.sType =
2665         VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_PROPERTIES_KHR;
2666 
2667     mFragmentShaderInterlockFeatures = {};
2668     mFragmentShaderInterlockFeatures.sType =
2669         VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_INTERLOCK_FEATURES_EXT;
2670 
2671     mImagelessFramebufferFeatures = {};
2672     mImagelessFramebufferFeatures.sType =
2673         VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES_KHR;
2674 
2675     mPipelineRobustnessFeatures = {};
2676     mPipelineRobustnessFeatures.sType =
2677         VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_ROBUSTNESS_FEATURES_EXT;
2678 
2679     mPipelineProtectedAccessFeatures = {};
2680     mPipelineProtectedAccessFeatures.sType =
2681         VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_PROTECTED_ACCESS_FEATURES_EXT;
2682 
2683     mRasterizationOrderAttachmentAccessFeatures = {};
2684     mRasterizationOrderAttachmentAccessFeatures.sType =
2685         VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_FEATURES_EXT;
2686 
2687     mSwapchainMaintenance1Features = {};
2688     mSwapchainMaintenance1Features.sType =
2689         VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SWAPCHAIN_MAINTENANCE_1_FEATURES_EXT;
2690 
2691     mDitheringFeatures       = {};
2692     mDitheringFeatures.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LEGACY_DITHERING_FEATURES_EXT;
2693 
2694     mDrmProperties       = {};
2695     mDrmProperties.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRM_PROPERTIES_EXT;
2696 
2697     mTimelineSemaphoreFeatures = {};
2698     mTimelineSemaphoreFeatures.sType =
2699         VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES_KHR;
2700 
2701     mHostImageCopyFeatures       = {};
2702     mHostImageCopyFeatures.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_IMAGE_COPY_FEATURES_EXT;
2703 
2704     mHostImageCopyProperties = {};
2705     mHostImageCopyProperties.sType =
2706         VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_IMAGE_COPY_PROPERTIES_EXT;
2707 
2708     m8BitStorageFeatures       = {};
2709     m8BitStorageFeatures.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES_KHR;
2710 
2711     m16BitStorageFeatures       = {};
2712     m16BitStorageFeatures.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES_KHR;
2713 
2714     mSynchronization2Features       = {};
2715     mSynchronization2Features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SYNCHRONIZATION_2_FEATURES;
2716 
2717 #if defined(ANGLE_PLATFORM_ANDROID)
2718     mExternalFormatResolveFeatures = {};
2719     mExternalFormatResolveFeatures.sType =
2720         VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FORMAT_RESOLVE_FEATURES_ANDROID;
2721 
2722     mExternalFormatResolveProperties = {};
2723     mExternalFormatResolveProperties.sType =
2724         VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FORMAT_RESOLVE_PROPERTIES_ANDROID;
2725 #endif
2726 
2727     // Query features and properties.
2728     VkPhysicalDeviceFeatures2KHR deviceFeatures = {};
2729     deviceFeatures.sType                        = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
2730 
2731     VkPhysicalDeviceProperties2 deviceProperties = {};
2732     deviceProperties.sType                       = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2;
2733 
2734     appendDeviceExtensionFeaturesNotPromoted(deviceExtensionNames, &deviceFeatures,
2735                                              &deviceProperties);
2736     appendDeviceExtensionFeaturesPromotedTo11(deviceExtensionNames, &deviceFeatures,
2737                                               &deviceProperties);
2738     appendDeviceExtensionFeaturesPromotedTo12(deviceExtensionNames, &deviceFeatures,
2739                                               &deviceProperties);
2740     appendDeviceExtensionFeaturesPromotedTo13(deviceExtensionNames, &deviceFeatures,
2741                                               &deviceProperties);
2742 
2743     vkGetPhysicalDeviceFeatures2(mPhysicalDevice, &deviceFeatures);
2744     vkGetPhysicalDeviceProperties2(mPhysicalDevice, &deviceProperties);
2745 
2746     // Clean up pNext chains
2747     mPhysicalDevice11Properties.pNext                 = nullptr;
2748     mPhysicalDevice11Features.pNext                   = nullptr;
2749     mLineRasterizationFeatures.pNext                  = nullptr;
2750     mMemoryReportFeatures.pNext                       = nullptr;
2751     mProvokingVertexFeatures.pNext                    = nullptr;
2752     mVertexAttributeDivisorFeatures.pNext             = nullptr;
2753     mVertexAttributeDivisorProperties.pNext           = nullptr;
2754     mTransformFeedbackFeatures.pNext                  = nullptr;
2755     mIndexTypeUint8Features.pNext                     = nullptr;
2756     mSubgroupProperties.pNext                         = nullptr;
2757     mSubgroupExtendedTypesFeatures.pNext              = nullptr;
2758     mCustomBorderColorFeatures.pNext                  = nullptr;
2759     mShaderFloat16Int8Features.pNext                  = nullptr;
2760     mDepthStencilResolveProperties.pNext              = nullptr;
2761     mMultisampledRenderToSingleSampledFeatures.pNext  = nullptr;
2762     mImage2dViewOf3dFeatures.pNext                    = nullptr;
2763     mMultiviewFeatures.pNext                          = nullptr;
2764     mMultiviewProperties.pNext                        = nullptr;
2765     mDriverProperties.pNext                           = nullptr;
2766     mSamplerYcbcrConversionFeatures.pNext             = nullptr;
2767     mProtectedMemoryFeatures.pNext                    = nullptr;
2768     mHostQueryResetFeatures.pNext                     = nullptr;
2769     mDepthClampZeroOneFeatures.pNext                  = nullptr;
2770     mDepthClipControlFeatures.pNext                   = nullptr;
2771     mPrimitivesGeneratedQueryFeatures.pNext           = nullptr;
2772     mPrimitiveTopologyListRestartFeatures.pNext       = nullptr;
2773     mExtendedDynamicStateFeatures.pNext               = nullptr;
2774     mExtendedDynamicState2Features.pNext              = nullptr;
2775     mGraphicsPipelineLibraryFeatures.pNext            = nullptr;
2776     mGraphicsPipelineLibraryProperties.pNext          = nullptr;
2777     mVertexInputDynamicStateFeatures.pNext            = nullptr;
2778     mDynamicRenderingFeatures.pNext                   = nullptr;
2779     mDynamicRenderingLocalReadFeatures.pNext          = nullptr;
2780     mFragmentShadingRateFeatures.pNext                = nullptr;
2781     mFragmentShaderInterlockFeatures.pNext            = nullptr;
2782     mImagelessFramebufferFeatures.pNext               = nullptr;
2783     mPipelineRobustnessFeatures.pNext                 = nullptr;
2784     mPipelineProtectedAccessFeatures.pNext            = nullptr;
2785     mRasterizationOrderAttachmentAccessFeatures.pNext = nullptr;
2786     mSwapchainMaintenance1Features.pNext              = nullptr;
2787     mDitheringFeatures.pNext                          = nullptr;
2788     mDrmProperties.pNext                              = nullptr;
2789     mTimelineSemaphoreFeatures.pNext                  = nullptr;
2790     mHostImageCopyFeatures.pNext                      = nullptr;
2791     mHostImageCopyProperties.pNext                    = nullptr;
2792     m8BitStorageFeatures.pNext                        = nullptr;
2793     m16BitStorageFeatures.pNext                       = nullptr;
2794     mSynchronization2Features.pNext                   = nullptr;
2795 #if defined(ANGLE_PLATFORM_ANDROID)
2796     mExternalFormatResolveFeatures.pNext   = nullptr;
2797     mExternalFormatResolveProperties.pNext = nullptr;
2798 #endif
2799 }
2800 
2801 // See comment above appendDeviceExtensionFeaturesNotPromoted.  Additional extensions are enabled
2802 // here which don't have feature structs:
2803 //
2804 // - VK_KHR_shared_presentable_image
2805 // - VK_EXT_memory_budget
2806 // - VK_KHR_incremental_present
2807 // - VK_EXT_queue_family_foreign
2808 // - VK_ANDROID_external_memory_android_hardware_buffer
2809 // - VK_GGP_frame_token
2810 // - VK_KHR_external_memory_fd
2811 // - VK_KHR_external_memory_fuchsia
2812 // - VK_KHR_external_semaphore_fd
2813 // - VK_KHR_external_fence_fd
2814 // - VK_FUCHSIA_external_semaphore
2815 // - VK_EXT_shader_stencil_export
2816 // - VK_EXT_load_store_op_none
2817 // - VK_QCOM_render_pass_store_ops
2818 // - VK_GOOGLE_display_timing
2819 // - VK_EXT_external_memory_dma_buf
2820 // - VK_EXT_image_drm_format_modifier
2821 // - VK_EXT_blend_operation_advanced
2822 // - VK_EXT_full_screen_exclusive
2823 //
// Enables every supported device extension that was never promoted to a core Vulkan version.
// For each feature that was previously detected, the extension name is appended to
// mEnabledDeviceExtensions and, when the extension has a feature struct, that struct is spliced
// into the mEnabledFeatures pNext chain so it is passed to vkCreateDevice.
void Renderer::enableDeviceExtensionsNotPromoted(const vk::ExtensionNameList &deviceExtensionNames)
{
    if (mFeatures.supportsSharedPresentableImageExtension.enabled)
    {
        mEnabledDeviceExtensions.push_back(VK_KHR_SHARED_PRESENTABLE_IMAGE_EXTENSION_NAME);
    }

    if (mFeatures.supportsDepthClampZeroOne.enabled)
    {
        mEnabledDeviceExtensions.push_back(VK_EXT_DEPTH_CLAMP_ZERO_ONE_EXTENSION_NAME);
        vk::AddToPNextChain(&mEnabledFeatures, &mDepthClampZeroOneFeatures);
    }

    if (mFeatures.supportsMemoryBudget.enabled)
    {
        mEnabledDeviceExtensions.push_back(VK_EXT_MEMORY_BUDGET_EXTENSION_NAME);
    }

    if (mFeatures.supportsIncrementalPresent.enabled)
    {
        mEnabledDeviceExtensions.push_back(VK_KHR_INCREMENTAL_PRESENT_EXTENSION_NAME);
    }

#if defined(ANGLE_PLATFORM_ANDROID)
    if (mFeatures.supportsAndroidHardwareBuffer.enabled)
    {
        mEnabledDeviceExtensions.push_back(VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME);
        mEnabledDeviceExtensions.push_back(
            VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME);
    }
#else
    // AHB support is Android-only; the feature must not have been enabled elsewhere.
    ASSERT(!mFeatures.supportsAndroidHardwareBuffer.enabled);
#endif

#if defined(ANGLE_PLATFORM_GGP)
    if (mFeatures.supportsGGPFrameToken.enabled)
    {
        mEnabledDeviceExtensions.push_back(VK_GGP_FRAME_TOKEN_EXTENSION_NAME);
    }
#else
    // Frame-token support is GGP-only; the feature must not have been enabled elsewhere.
    ASSERT(!mFeatures.supportsGGPFrameToken.enabled);
#endif

    if (mFeatures.supportsExternalMemoryFd.enabled)
    {
        mEnabledDeviceExtensions.push_back(VK_KHR_EXTERNAL_MEMORY_FD_EXTENSION_NAME);
    }

    if (mFeatures.supportsExternalMemoryFuchsia.enabled)
    {
        mEnabledDeviceExtensions.push_back(VK_FUCHSIA_EXTERNAL_MEMORY_EXTENSION_NAME);
    }

    if (mFeatures.supportsExternalSemaphoreFd.enabled)
    {
        mEnabledDeviceExtensions.push_back(VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME);
    }

    if (mFeatures.supportsExternalFenceFd.enabled)
    {
        mEnabledDeviceExtensions.push_back(VK_KHR_EXTERNAL_FENCE_FD_EXTENSION_NAME);
    }

    if (mFeatures.supportsExternalSemaphoreFuchsia.enabled)
    {
        mEnabledDeviceExtensions.push_back(VK_FUCHSIA_EXTERNAL_SEMAPHORE_EXTENSION_NAME);
    }

    if (mFeatures.supportsShaderStencilExport.enabled)
    {
        mEnabledDeviceExtensions.push_back(VK_EXT_SHADER_STENCIL_EXPORT_EXTENSION_NAME);
    }

    // Prefer the EXT load/store-op-none extension; only fall back to the QCOM store-ops variant
    // when the former is unavailable.
    if (mFeatures.supportsRenderPassLoadStoreOpNone.enabled)
    {
        mEnabledDeviceExtensions.push_back(VK_EXT_LOAD_STORE_OP_NONE_EXTENSION_NAME);
    }
    else if (mFeatures.supportsRenderPassStoreOpNone.enabled)
    {
        mEnabledDeviceExtensions.push_back(VK_QCOM_RENDER_PASS_STORE_OPS_EXTENSION_NAME);
    }

    if (mFeatures.supportsTimestampSurfaceAttribute.enabled)
    {
        mEnabledDeviceExtensions.push_back(VK_GOOGLE_DISPLAY_TIMING_EXTENSION_NAME);
    }

    if (mFeatures.bresenhamLineRasterization.enabled)
    {
        mEnabledDeviceExtensions.push_back(VK_EXT_LINE_RASTERIZATION_EXTENSION_NAME);
        vk::AddToPNextChain(&mEnabledFeatures, &mLineRasterizationFeatures);
    }

    if (mFeatures.provokingVertex.enabled)
    {
        mEnabledDeviceExtensions.push_back(VK_EXT_PROVOKING_VERTEX_EXTENSION_NAME);
        vk::AddToPNextChain(&mEnabledFeatures, &mProvokingVertexFeatures);
    }

    // Gated on the queried feature bit directly rather than an ANGLE feature flag.
    if (mVertexAttributeDivisorFeatures.vertexAttributeInstanceRateDivisor)
    {
        mEnabledDeviceExtensions.push_back(VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_EXTENSION_NAME);
        vk::AddToPNextChain(&mEnabledFeatures, &mVertexAttributeDivisorFeatures);

        // We only store 8 bit divisor in GraphicsPipelineDesc so capping value & we emulate if
        // exceeded
        mMaxVertexAttribDivisor =
            std::min(mVertexAttributeDivisorProperties.maxVertexAttribDivisor,
                     static_cast<uint32_t>(std::numeric_limits<uint8_t>::max()));
    }

    if (mFeatures.supportsTransformFeedbackExtension.enabled)
    {
        mEnabledDeviceExtensions.push_back(VK_EXT_TRANSFORM_FEEDBACK_EXTENSION_NAME);
        vk::AddToPNextChain(&mEnabledFeatures, &mTransformFeedbackFeatures);
    }

    if (mFeatures.supportsCustomBorderColor.enabled)
    {
        mEnabledDeviceExtensions.push_back(VK_EXT_CUSTOM_BORDER_COLOR_EXTENSION_NAME);
        vk::AddToPNextChain(&mEnabledFeatures, &mCustomBorderColorFeatures);
    }

    if (mFeatures.supportsIndexTypeUint8.enabled)
    {
        mEnabledDeviceExtensions.push_back(VK_EXT_INDEX_TYPE_UINT8_EXTENSION_NAME);
        vk::AddToPNextChain(&mEnabledFeatures, &mIndexTypeUint8Features);
    }

    if (mFeatures.supportsMultisampledRenderToSingleSampled.enabled)
    {
        mEnabledDeviceExtensions.push_back(
            VK_EXT_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_EXTENSION_NAME);
        vk::AddToPNextChain(&mEnabledFeatures, &mMultisampledRenderToSingleSampledFeatures);
    }

    if (mFeatures.logMemoryReportCallbacks.enabled || mFeatures.logMemoryReportStats.enabled)
    {
        // Either logging feature requires the deviceMemoryReport feature to have been detected.
        ASSERT(mMemoryReportFeatures.deviceMemoryReport);
        mEnabledDeviceExtensions.push_back(VK_EXT_DEVICE_MEMORY_REPORT_EXTENSION_NAME);
    }

    if (mFeatures.supportsExternalMemoryDmaBufAndModifiers.enabled)
    {
        mEnabledDeviceExtensions.push_back(VK_EXT_EXTERNAL_MEMORY_DMA_BUF_EXTENSION_NAME);
        mEnabledDeviceExtensions.push_back(VK_EXT_IMAGE_DRM_FORMAT_MODIFIER_EXTENSION_NAME);
    }

    if (mFeatures.supportsDepthClipControl.enabled)
    {
        mEnabledDeviceExtensions.push_back(VK_EXT_DEPTH_CLIP_CONTROL_EXTENSION_NAME);
        vk::AddToPNextChain(&mEnabledFeatures, &mDepthClipControlFeatures);
    }

    if (mFeatures.supportsPrimitivesGeneratedQuery.enabled)
    {
        mEnabledDeviceExtensions.push_back(VK_EXT_PRIMITIVES_GENERATED_QUERY_EXTENSION_NAME);
        vk::AddToPNextChain(&mEnabledFeatures, &mPrimitivesGeneratedQueryFeatures);
    }

    if (mFeatures.supportsPrimitiveTopologyListRestart.enabled)
    {
        mEnabledDeviceExtensions.push_back(VK_EXT_PRIMITIVE_TOPOLOGY_LIST_RESTART_EXTENSION_NAME);
        vk::AddToPNextChain(&mEnabledFeatures, &mPrimitiveTopologyListRestartFeatures);
    }

    if (mFeatures.supportsBlendOperationAdvanced.enabled)
    {
        mEnabledDeviceExtensions.push_back(VK_EXT_BLEND_OPERATION_ADVANCED_EXTENSION_NAME);
    }

    if (mFeatures.supportsGraphicsPipelineLibrary.enabled)
    {
        // VK_EXT_graphics_pipeline_library requires VK_KHR_pipeline_library
        ASSERT(ExtensionFound(VK_KHR_PIPELINE_LIBRARY_EXTENSION_NAME, deviceExtensionNames));
        mEnabledDeviceExtensions.push_back(VK_KHR_PIPELINE_LIBRARY_EXTENSION_NAME);

        mEnabledDeviceExtensions.push_back(VK_EXT_GRAPHICS_PIPELINE_LIBRARY_EXTENSION_NAME);
        vk::AddToPNextChain(&mEnabledFeatures, &mGraphicsPipelineLibraryFeatures);
    }

    if (mFeatures.supportsFragmentShadingRate.enabled)
    {
        mEnabledDeviceExtensions.push_back(VK_KHR_FRAGMENT_SHADING_RATE_EXTENSION_NAME);
        vk::AddToPNextChain(&mEnabledFeatures, &mFragmentShadingRateFeatures);
    }

    if (mFeatures.supportsFragmentShaderPixelInterlock.enabled)
    {
        mEnabledDeviceExtensions.push_back(VK_EXT_FRAGMENT_SHADER_INTERLOCK_EXTENSION_NAME);
        vk::AddToPNextChain(&mEnabledFeatures, &mFragmentShaderInterlockFeatures);
    }

    if (mFeatures.supportsPipelineRobustness.enabled)
    {
        mEnabledDeviceExtensions.push_back(VK_EXT_PIPELINE_ROBUSTNESS_EXTENSION_NAME);
        vk::AddToPNextChain(&mEnabledFeatures, &mPipelineRobustnessFeatures);
    }

    if (mFeatures.supportsPipelineProtectedAccess.enabled)
    {
        mEnabledDeviceExtensions.push_back(VK_EXT_PIPELINE_PROTECTED_ACCESS_EXTENSION_NAME);
        vk::AddToPNextChain(&mEnabledFeatures, &mPipelineProtectedAccessFeatures);
    }

    if (mFeatures.supportsRasterizationOrderAttachmentAccess.enabled)
    {
        // Prefer the EXT form of the extension; otherwise fall back to its ARM predecessor,
        // which must be present for the feature to have been enabled.
        if (ExtensionFound(VK_EXT_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_EXTENSION_NAME,
                           deviceExtensionNames))
        {
            mEnabledDeviceExtensions.push_back(
                VK_EXT_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_EXTENSION_NAME);
        }
        else
        {
            ASSERT(ExtensionFound(VK_ARM_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_EXTENSION_NAME,
                                  deviceExtensionNames));
            mEnabledDeviceExtensions.push_back(
                VK_ARM_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_EXTENSION_NAME);
        }
        vk::AddToPNextChain(&mEnabledFeatures, &mRasterizationOrderAttachmentAccessFeatures);
    }

    if (mFeatures.supportsImage2dViewOf3d.enabled)
    {
        mEnabledDeviceExtensions.push_back(VK_EXT_IMAGE_2D_VIEW_OF_3D_EXTENSION_NAME);
        vk::AddToPNextChain(&mEnabledFeatures, &mImage2dViewOf3dFeatures);
    }

    if (mFeatures.supportsSwapchainMaintenance1.enabled)
    {
        mEnabledDeviceExtensions.push_back(VK_EXT_SWAPCHAIN_MAINTENANCE_1_EXTENSION_NAME);
        vk::AddToPNextChain(&mEnabledFeatures, &mSwapchainMaintenance1Features);
    }

    if (mFeatures.supportsLegacyDithering.enabled)
    {
        mEnabledDeviceExtensions.push_back(VK_EXT_LEGACY_DITHERING_EXTENSION_NAME);
        vk::AddToPNextChain(&mEnabledFeatures, &mDitheringFeatures);
    }

    if (mFeatures.supportsFormatFeatureFlags2.enabled)
    {
        mEnabledDeviceExtensions.push_back(VK_KHR_FORMAT_FEATURE_FLAGS_2_EXTENSION_NAME);
    }

    if (mFeatures.supportsHostImageCopy.enabled)
    {
        // VK_EXT_host_image_copy requires VK_KHR_copy_commands2 and VK_KHR_format_feature_flags2.
        // VK_KHR_format_feature_flags2 is enabled separately.
        ASSERT(ExtensionFound(VK_KHR_COPY_COMMANDS_2_EXTENSION_NAME, deviceExtensionNames));
        ASSERT(ExtensionFound(VK_KHR_FORMAT_FEATURE_FLAGS_2_EXTENSION_NAME, deviceExtensionNames));
        mEnabledDeviceExtensions.push_back(VK_KHR_COPY_COMMANDS_2_EXTENSION_NAME);

        mEnabledDeviceExtensions.push_back(VK_EXT_HOST_IMAGE_COPY_EXTENSION_NAME);
        vk::AddToPNextChain(&mEnabledFeatures, &mHostImageCopyFeatures);
    }

    if (getFeatures().supportsVertexInputDynamicState.enabled)
    {
        mEnabledDeviceExtensions.push_back(VK_EXT_VERTEX_INPUT_DYNAMIC_STATE_EXTENSION_NAME);
        vk::AddToPNextChain(&mEnabledFeatures, &mVertexInputDynamicStateFeatures);
    }

    if (getFeatures().supportsDynamicRendering.enabled)
    {
        mEnabledDeviceExtensions.push_back(VK_KHR_DYNAMIC_RENDERING_EXTENSION_NAME);
        vk::AddToPNextChain(&mEnabledFeatures, &mDynamicRenderingFeatures);
    }

    if (getFeatures().supportsDynamicRenderingLocalRead.enabled)
    {
        mEnabledDeviceExtensions.push_back(VK_KHR_DYNAMIC_RENDERING_LOCAL_READ_EXTENSION_NAME);
        vk::AddToPNextChain(&mEnabledFeatures, &mDynamicRenderingLocalReadFeatures);
    }

#if defined(ANGLE_PLATFORM_WINDOWS)
    // We only need the VK_EXT_full_screen_exclusive extension if we are opting
    // out of it via VK_FULL_SCREEN_EXCLUSIVE_DISALLOWED_EXT (i.e. working
    // around driver bugs).
    if (getFeatures().supportsFullScreenExclusive.enabled &&
        getFeatures().forceDisableFullScreenExclusive.enabled)
    {
        mEnabledDeviceExtensions.push_back(VK_EXT_FULL_SCREEN_EXCLUSIVE_EXTENSION_NAME);
    }
#endif

#if defined(ANGLE_PLATFORM_ANDROID)
    if (mFeatures.supportsExternalFormatResolve.enabled)
    {
        mEnabledDeviceExtensions.push_back(VK_ANDROID_EXTERNAL_FORMAT_RESOLVE_EXTENSION_NAME);
        vk::AddToPNextChain(&mEnabledFeatures, &mExternalFormatResolveFeatures);
    }
#endif
}
3119 
3120 // See comment above appendDeviceExtensionFeaturesPromotedTo11.  Additional extensions are enabled
3121 // here which don't have feature structs:
3122 //
3123 // - VK_KHR_get_memory_requirements2
3124 // - VK_KHR_bind_memory2
3125 // - VK_KHR_maintenance1
3126 // - VK_KHR_external_memory
3127 // - VK_KHR_external_semaphore
3128 // - VK_KHR_external_fence
3129 //
// Enables supported device extensions that were promoted to Vulkan 1.1, chaining their feature
// structs into mEnabledFeatures.  Also masks out feature bits that ANGLE must not request.
void Renderer::enableDeviceExtensionsPromotedTo11(const vk::ExtensionNameList &deviceExtensionNames)
{
    // OVR_multiview disallows multiview with geometry and tessellation, so don't request these
    // features.
    mMultiviewFeatures.multiviewGeometryShader            = VK_FALSE;
    mMultiviewFeatures.multiviewTessellationShader        = VK_FALSE;
    mPhysicalDevice11Features.multiviewGeometryShader     = VK_FALSE;
    mPhysicalDevice11Features.multiviewTessellationShader = VK_FALSE;

    // Disable protected memory if not needed as it can introduce overhead
    if (!mFeatures.supportsProtectedMemory.enabled)
    {
        mPhysicalDevice11Features.protectedMemory = VK_FALSE;
    }

    // The following extensions are core in Vulkan 1.1, so only their feature structs need to be
    // chained; no extension names are pushed for them.
    if (mFeatures.supportsMultiview.enabled)
    {
        vk::AddToPNextChain(&mEnabledFeatures, &mMultiviewFeatures);
    }

    if (mFeatures.supportsYUVSamplerConversion.enabled)
    {
        vk::AddToPNextChain(&mEnabledFeatures, &mSamplerYcbcrConversionFeatures);
    }

    if (mFeatures.supportsProtectedMemory.enabled)
    {
        vk::AddToPNextChain(&mEnabledFeatures, &mProtectedMemoryFeatures);
    }

    // 16-bit storage is enabled if any of its sub-features is used.
    if (mFeatures.supports16BitStorageBuffer.enabled ||
        mFeatures.supports16BitUniformAndStorageBuffer.enabled ||
        mFeatures.supports16BitPushConstant.enabled || mFeatures.supports16BitInputOutput.enabled)
    {
        mEnabledDeviceExtensions.push_back(VK_KHR_16BIT_STORAGE_EXTENSION_NAME);
        vk::AddToPNextChain(&mEnabledFeatures, &m16BitStorageFeatures);
    }
}
3168 
3169 // See comment above appendDeviceExtensionFeaturesPromotedTo12.  Additional extensions are enabled
3170 // here which don't have feature structs:
3171 //
3172 // - VK_KHR_create_renderpass2
3173 // - VK_KHR_image_format_list
3174 // - VK_KHR_sampler_mirror_clamp_to_edge
3175 //
// Enables supported device extensions that were promoted to Vulkan 1.2, pushing their names onto
// mEnabledDeviceExtensions and chaining their feature structs into mEnabledFeatures where present.
void Renderer::enableDeviceExtensionsPromotedTo12(const vk::ExtensionNameList &deviceExtensionNames)
{
    if (mFeatures.supportsRenderpass2.enabled)
    {
        mEnabledDeviceExtensions.push_back(VK_KHR_CREATE_RENDERPASS_2_EXTENSION_NAME);
    }

    if (mFeatures.supportsImageFormatList.enabled)
    {
        mEnabledDeviceExtensions.push_back(VK_KHR_IMAGE_FORMAT_LIST_EXTENSION_NAME);
    }

    if (mFeatures.supportsSPIRV14.enabled)
    {
        // VK_KHR_spirv_1_4 depends on VK_KHR_shader_float_controls, so both are enabled together.
        mEnabledDeviceExtensions.push_back(VK_KHR_SHADER_FLOAT_CONTROLS_EXTENSION_NAME);
        mEnabledDeviceExtensions.push_back(VK_KHR_SPIRV_1_4_EXTENSION_NAME);
    }

    if (mFeatures.supportsSamplerMirrorClampToEdge.enabled)
    {
        mEnabledDeviceExtensions.push_back(VK_KHR_SAMPLER_MIRROR_CLAMP_TO_EDGE_EXTENSION_NAME);
    }

    if (mFeatures.supportsDepthStencilResolve.enabled)
    {
        mEnabledDeviceExtensions.push_back(VK_KHR_DEPTH_STENCIL_RESOLVE_EXTENSION_NAME);
    }

    // Subgroup extended types are only needed by the compute-based mipmap generation path.
    if (mFeatures.allowGenerateMipmapWithCompute.enabled)
    {
        mEnabledDeviceExtensions.push_back(VK_KHR_SHADER_SUBGROUP_EXTENDED_TYPES_EXTENSION_NAME);
        vk::AddToPNextChain(&mEnabledFeatures, &mSubgroupExtendedTypesFeatures);
    }

    if (mFeatures.supportsShaderFloat16.enabled)
    {
        mEnabledDeviceExtensions.push_back(VK_KHR_SHADER_FLOAT16_INT8_EXTENSION_NAME);
        vk::AddToPNextChain(&mEnabledFeatures, &mShaderFloat16Int8Features);
    }

    if (mFeatures.supportsHostQueryReset.enabled)
    {
        mEnabledDeviceExtensions.push_back(VK_EXT_HOST_QUERY_RESET_EXTENSION_NAME);
        vk::AddToPNextChain(&mEnabledFeatures, &mHostQueryResetFeatures);
    }

    if (mFeatures.supportsImagelessFramebuffer.enabled)
    {
        mEnabledDeviceExtensions.push_back(VK_KHR_IMAGELESS_FRAMEBUFFER_EXTENSION_NAME);
        vk::AddToPNextChain(&mEnabledFeatures, &mImagelessFramebufferFeatures);
    }

    if (mFeatures.supportsTimelineSemaphore.enabled)
    {
        mEnabledDeviceExtensions.push_back(VK_KHR_TIMELINE_SEMAPHORE_EXTENSION_NAME);
        vk::AddToPNextChain(&mEnabledFeatures, &mTimelineSemaphoreFeatures);
    }

    // 8-bit storage is enabled if any of its sub-features is used.
    if (mFeatures.supports8BitStorageBuffer.enabled ||
        mFeatures.supports8BitUniformAndStorageBuffer.enabled ||
        mFeatures.supports8BitPushConstant.enabled)
    {
        mEnabledDeviceExtensions.push_back(VK_KHR_8BIT_STORAGE_EXTENSION_NAME);
        vk::AddToPNextChain(&mEnabledFeatures, &m8BitStorageFeatures);
    }
}
3242 
3243 // See comment above appendDeviceExtensionFeaturesPromotedTo13.
// Enables supported device extensions that were promoted to Vulkan 1.3, pushing their names onto
// mEnabledDeviceExtensions and chaining their feature structs into mEnabledFeatures where present.
void Renderer::enableDeviceExtensionsPromotedTo13(const vk::ExtensionNameList &deviceExtensionNames)
{
    // VK_EXT_pipeline_creation_feedback has no feature struct; the extension name alone suffices.
    if (mFeatures.supportsPipelineCreationFeedback.enabled)
    {
        mEnabledDeviceExtensions.push_back(VK_EXT_PIPELINE_CREATION_FEEDBACK_EXTENSION_NAME);
    }

    if (mFeatures.supportsExtendedDynamicState.enabled)
    {
        mEnabledDeviceExtensions.push_back(VK_EXT_EXTENDED_DYNAMIC_STATE_EXTENSION_NAME);
        vk::AddToPNextChain(&mEnabledFeatures, &mExtendedDynamicStateFeatures);
    }

    if (mFeatures.supportsExtendedDynamicState2.enabled)
    {
        mEnabledDeviceExtensions.push_back(VK_EXT_EXTENDED_DYNAMIC_STATE_2_EXTENSION_NAME);
        vk::AddToPNextChain(&mEnabledFeatures, &mExtendedDynamicState2Features);
    }

    if (mFeatures.supportsSynchronization2.enabled)
    {
        mEnabledDeviceExtensions.push_back(VK_KHR_SYNCHRONIZATION_2_EXTENSION_NAME);
        vk::AddToPNextChain(&mEnabledFeatures, &mSynchronization2Features);
    }
}
3269 
// Builds the final list of device extensions to enable (mEnabledDeviceExtensions):
// enumerates everything the implementation and layers offer, queries extension features,
// initializes feature flags/workarounds, then enables the supported extensions grouped by
// the core Vulkan version they were promoted to.  Fails if a requested extension is not
// actually available.
angle::Result Renderer::enableDeviceExtensions(vk::Context *context,
                                               const angle::FeatureOverrides &featureOverrides,
                                               UseVulkanSwapchain useVulkanSwapchain,
                                               angle::NativeWindowSystem nativeWindowSystem)
{
    // Enumerate device extensions that are provided by the vulkan
    // implementation and implicit layers.
    uint32_t deviceExtensionCount = 0;
    ANGLE_VK_TRY(context, vkEnumerateDeviceExtensionProperties(mPhysicalDevice, nullptr,
                                                               &deviceExtensionCount, nullptr));

    // Work-around a race condition in the Android platform during Android start-up, that can cause
    // the second call to vkEnumerateDeviceExtensionProperties to have an additional extension.  In
    // that case, the second call will return VK_INCOMPLETE.  To work-around that, add 1 to
    // deviceExtensionCount and ask for one more extension property than the first call said there
    // were.  See: http://anglebug.com/6715 and internal-to-Google bug: b/206733351.
    deviceExtensionCount++;
    std::vector<VkExtensionProperties> deviceExtensionProps(deviceExtensionCount);
    ANGLE_VK_TRY(context,
                 vkEnumerateDeviceExtensionProperties(
                     mPhysicalDevice, nullptr, &deviceExtensionCount, deviceExtensionProps.data()));
    // In case fewer items were returned than requested, resize deviceExtensionProps to the number
    // of extensions returned (i.e. deviceExtensionCount).  See: b/208937840
    deviceExtensionProps.resize(deviceExtensionCount);

    // Enumerate device extensions that are provided by explicit layers.
    for (const char *layerName : mEnabledDeviceLayerNames)
    {
        // Layer extensions are appended after the implementation/implicit-layer ones.
        uint32_t previousExtensionCount    = static_cast<uint32_t>(deviceExtensionProps.size());
        uint32_t deviceLayerExtensionCount = 0;
        ANGLE_VK_TRY(context, vkEnumerateDeviceExtensionProperties(
                                  mPhysicalDevice, layerName, &deviceLayerExtensionCount, nullptr));
        deviceExtensionProps.resize(previousExtensionCount + deviceLayerExtensionCount);
        ANGLE_VK_TRY(context, vkEnumerateDeviceExtensionProperties(
                                  mPhysicalDevice, layerName, &deviceLayerExtensionCount,
                                  deviceExtensionProps.data() + previousExtensionCount));
        // In case fewer items were returned than requested, resize deviceExtensionProps to the
        // number of extensions returned (i.e. deviceLayerExtensionCount).
        deviceExtensionProps.resize(previousExtensionCount + deviceLayerExtensionCount);
    }

    // Get the list of device extensions that are available.  The list is sorted so that
    // availability checks below (and in VerifyExtensionsPresent) can binary-search it.
    vk::ExtensionNameList deviceExtensionNames;
    if (!deviceExtensionProps.empty())
    {
        ASSERT(deviceExtensionNames.size() <= deviceExtensionProps.size());
        for (const VkExtensionProperties &prop : deviceExtensionProps)
        {
            deviceExtensionNames.push_back(prop.extensionName);
        }
        std::sort(deviceExtensionNames.begin(), deviceExtensionNames.end(), StrLess);
    }

    // The swapchain extension is mandatory when presenting through Vulkan; it is verified
    // against the available list at the end of this function.
    if (useVulkanSwapchain == UseVulkanSwapchain::Yes)
    {
        mEnabledDeviceExtensions.push_back(VK_KHR_SWAPCHAIN_EXTENSION_NAME);
    }

    // Query extensions and their features.
    queryDeviceExtensionFeatures(deviceExtensionNames);

    // Initialize features and workarounds.
    initFeatures(deviceExtensionNames, featureOverrides, useVulkanSwapchain, nativeWindowSystem);

    // App based feature overrides.
    appBasedFeatureOverrides(deviceExtensionNames);

    // Enable extensions that could be used
    enableDeviceExtensionsNotPromoted(deviceExtensionNames);
    enableDeviceExtensionsPromotedTo11(deviceExtensionNames);
    enableDeviceExtensionsPromotedTo12(deviceExtensionNames);
    enableDeviceExtensionsPromotedTo13(deviceExtensionNames);

    // Fail device creation early if any extension we decided to enable is missing.
    std::sort(mEnabledDeviceExtensions.begin(), mEnabledDeviceExtensions.end(), StrLess);
    ANGLE_VK_TRY(context, VerifyExtensionsPresent(deviceExtensionNames, mEnabledDeviceExtensions));

    return angle::Result::Continue;
}
3348 
// Loads instance-level extension entry points for the features that were enabled.  When
// ANGLE_SHARED_LIBVULKAN is defined, volk loads everything instead, so only the
// promoted-to-core fallback at the end is needed.
void Renderer::initInstanceExtensionEntryPoints()
{
#if !defined(ANGLE_SHARED_LIBVULKAN)
    // Instance entry points
    if (mFeatures.supportsExternalSemaphoreFd.enabled ||
        mFeatures.supportsExternalSemaphoreFuchsia.enabled)
    {
        InitExternalSemaphoreFdFunctions(mInstance);
    }

    if (mFeatures.supportsExternalFenceFd.enabled)
    {
        InitExternalFenceFdFunctions(mInstance);
    }

#    if defined(ANGLE_PLATFORM_ANDROID)
    if (mFeatures.supportsAndroidHardwareBuffer.enabled)
    {
        InitExternalMemoryHardwareBufferANDROIDFunctions(mInstance);
    }
#    endif
#endif

    // For promoted extensions, initialize their entry points from the core version.
    initializeInstanceExtensionEntryPointsFromCore();
}
3375 
// Loads device-level extension entry points for the features that were enabled on mDevice.
// When ANGLE_SHARED_LIBVULKAN is defined, volkLoadDevice has already loaded these, so only
// the promoted-to-core fallback at the end is needed.
void Renderer::initDeviceExtensionEntryPoints()
{
#if !defined(ANGLE_SHARED_LIBVULKAN)
    // Device entry points
    if (mFeatures.supportsTransformFeedbackExtension.enabled)
    {
        InitTransformFeedbackEXTFunctions(mDevice);
    }
    if (useLogicOpDynamicState())
    {
        // VK_EXT_extended_dynamic_state2 is only partially core in Vulkan 1.3.  If the logicOp
        // dynamic state (only from the extension) is used, need to load the entry points from the
        // extension
        InitExtendedDynamicState2EXTFunctions(mDevice);
    }
    if (mFeatures.supportsFragmentShadingRate.enabled)
    {
        InitFragmentShadingRateKHRDeviceFunction(mDevice);
    }
    if (mFeatures.supportsTimestampSurfaceAttribute.enabled)
    {
        InitGetPastPresentationTimingGoogleFunction(mDevice);
    }
    if (mFeatures.supportsHostImageCopy.enabled)
    {
        InitHostImageCopyFunctions(mDevice);
    }
    if (mFeatures.supportsVertexInputDynamicState.enabled)
    {
        InitVertexInputDynamicStateEXTFunctions(mDevice);
    }
    if (mFeatures.supportsDynamicRenderingLocalRead.enabled)
    {
        InitDynamicRenderingLocalReadFunctions(mDevice);
    }
    // Extensions promoted to Vulkan 1.2
    {
        if (mFeatures.supportsHostQueryReset.enabled)
        {
            InitHostQueryResetFunctions(mDevice);
        }
        if (mFeatures.supportsRenderpass2.enabled)
        {
            InitRenderPass2KHRFunctions(mDevice);
        }
    }
    // Extensions promoted to Vulkan 1.3
    {
        if (mFeatures.supportsExtendedDynamicState.enabled)
        {
            InitExtendedDynamicStateEXTFunctions(mDevice);
        }
        if (mFeatures.supportsExtendedDynamicState2.enabled)
        {
            InitExtendedDynamicState2EXTFunctions(mDevice);
        }
        if (mFeatures.supportsDynamicRendering.enabled)
        {
            InitDynamicRenderingFunctions(mDevice);
        }
    }
#endif  // !defined(ANGLE_SHARED_LIBVULKAN)

    // For promoted extensions, initialize their entry points from the core version.
    initializeDeviceExtensionEntryPointsFromCore();
}
3442 
// Prepares everything needed before vkCreateDevice: enumerates/selects device layers,
// resolves the extension set (via enableDeviceExtensions), and fills mEnabledFeatures with
// the core physical-device features ANGLE uses.  Does not create the device itself; that
// happens in createDeviceAndQueue.
angle::Result Renderer::setupDevice(vk::Context *context,
                                    const angle::FeatureOverrides &featureOverrides,
                                    const char *wsiLayer,
                                    UseVulkanSwapchain useVulkanSwapchain,
                                    angle::NativeWindowSystem nativeWindowSystem)
{
    uint32_t deviceLayerCount = 0;
    ANGLE_VK_TRY(context,
                 vkEnumerateDeviceLayerProperties(mPhysicalDevice, &deviceLayerCount, nullptr));

    std::vector<VkLayerProperties> deviceLayerProps(deviceLayerCount);
    ANGLE_VK_TRY(context, vkEnumerateDeviceLayerProperties(mPhysicalDevice, &deviceLayerCount,
                                                           deviceLayerProps.data()));

    mEnabledDeviceLayerNames.clear();
    if (mEnableValidationLayers)
    {
        // Validation may be turned back off here if the layers are not actually available.
        mEnableValidationLayers =
            GetAvailableValidationLayers(deviceLayerProps, false, &mEnabledDeviceLayerNames);
    }

    if (wsiLayer != nullptr)
    {
        mEnabledDeviceLayerNames.push_back(wsiLayer);
    }

    mEnabledFeatures       = {};
    mEnabledFeatures.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;

    ANGLE_TRY(
        enableDeviceExtensions(context, featureOverrides, useVulkanSwapchain, nativeWindowSystem));

    // Used to support cubemap array:
    mEnabledFeatures.features.imageCubeArray = mFeatures.supportsImageCubeArray.enabled;
    // Used to support framebuffers with multiple attachments:
    mEnabledFeatures.features.independentBlend = mPhysicalDeviceFeatures.independentBlend;
    // Used to support multi_draw_indirect
    mEnabledFeatures.features.multiDrawIndirect = mPhysicalDeviceFeatures.multiDrawIndirect;
    mEnabledFeatures.features.drawIndirectFirstInstance =
        mPhysicalDeviceFeatures.drawIndirectFirstInstance;
    // Used to support robust buffer access, if VK_EXT_pipeline_robustness is not supported.
    if (!mFeatures.supportsPipelineRobustness.enabled)
    {
        mEnabledFeatures.features.robustBufferAccess = mPhysicalDeviceFeatures.robustBufferAccess;
    }
    // Used to support Anisotropic filtering:
    mEnabledFeatures.features.samplerAnisotropy = mPhysicalDeviceFeatures.samplerAnisotropy;
    // Used to support wide lines:
    mEnabledFeatures.features.wideLines = mPhysicalDeviceFeatures.wideLines;
    // Used to emulate transform feedback:
    mEnabledFeatures.features.vertexPipelineStoresAndAtomics =
        mPhysicalDeviceFeatures.vertexPipelineStoresAndAtomics;
    // Used to implement storage buffers and images in the fragment shader:
    mEnabledFeatures.features.fragmentStoresAndAtomics =
        mPhysicalDeviceFeatures.fragmentStoresAndAtomics;
    // Used to emulate the primitives generated query:
    mEnabledFeatures.features.pipelineStatisticsQuery =
        !mFeatures.supportsPrimitivesGeneratedQuery.enabled &&
        mFeatures.supportsPipelineStatisticsQuery.enabled;
    // Used to support geometry shaders:
    mEnabledFeatures.features.geometryShader = mPhysicalDeviceFeatures.geometryShader;
    // Used to support EXT/OES_gpu_shader5:
    mEnabledFeatures.features.shaderImageGatherExtended =
        mPhysicalDeviceFeatures.shaderImageGatherExtended;
    // Used to support EXT/OES_gpu_shader5:
    mEnabledFeatures.features.shaderUniformBufferArrayDynamicIndexing =
        mPhysicalDeviceFeatures.shaderUniformBufferArrayDynamicIndexing;
    mEnabledFeatures.features.shaderSampledImageArrayDynamicIndexing =
        mPhysicalDeviceFeatures.shaderSampledImageArrayDynamicIndexing;
    // Used to support APPLE_clip_distance
    mEnabledFeatures.features.shaderClipDistance = mPhysicalDeviceFeatures.shaderClipDistance;
    // Used to support OES_sample_shading
    mEnabledFeatures.features.sampleRateShading = mPhysicalDeviceFeatures.sampleRateShading;
    // Used to support EXT_depth_clamp and depth clears through draw calls
    mEnabledFeatures.features.depthClamp = mPhysicalDeviceFeatures.depthClamp;
    // Used to support EXT_polygon_offset_clamp
    mEnabledFeatures.features.depthBiasClamp = mPhysicalDeviceFeatures.depthBiasClamp;
    // Used to support NV_polygon_mode / ANGLE_polygon_mode
    mEnabledFeatures.features.fillModeNonSolid = mPhysicalDeviceFeatures.fillModeNonSolid;
    // Used to support EXT_clip_cull_distance
    mEnabledFeatures.features.shaderCullDistance = mPhysicalDeviceFeatures.shaderCullDistance;
    // Used to support tessellation Shader:
    mEnabledFeatures.features.tessellationShader = mPhysicalDeviceFeatures.tessellationShader;
    // Used to support EXT_blend_func_extended
    mEnabledFeatures.features.dualSrcBlend = mPhysicalDeviceFeatures.dualSrcBlend;
    // Used to support ANGLE_logic_op and GLES1
    mEnabledFeatures.features.logicOp = mPhysicalDeviceFeatures.logicOp;
    // Used to support EXT_multisample_compatibility
    mEnabledFeatures.features.alphaToOne = mPhysicalDeviceFeatures.alphaToOne;

    // Queries inherited by secondary command buffers are only needed when ANGLE records into
    // actual Vulkan secondary command buffers rather than inline in the primary.
    if (!vk::OutsideRenderPassCommandBuffer::ExecutesInline() ||
        !vk::RenderPassCommandBuffer::ExecutesInline())
    {
        mEnabledFeatures.features.inheritedQueries = mPhysicalDeviceFeatures.inheritedQueries;
    }

    return angle::Result::Continue;
}
3541 
createDeviceAndQueue(vk::Context * context,uint32_t queueFamilyIndex)3542 angle::Result Renderer::createDeviceAndQueue(vk::Context *context, uint32_t queueFamilyIndex)
3543 {
3544     mCurrentQueueFamilyIndex = queueFamilyIndex;
3545 
3546     vk::QueueFamily queueFamily;
3547     queueFamily.initialize(mQueueFamilyProperties[queueFamilyIndex], queueFamilyIndex);
3548     ANGLE_VK_CHECK(context, queueFamily.getDeviceQueueCount() > 0, VK_ERROR_INITIALIZATION_FAILED);
3549 
3550     // We enable protected context only if both supportsProtectedMemory and device also supports
3551     // protected. There are cases we have to disable supportsProtectedMemory feature due to driver
3552     // bugs.
3553     bool enableProtectedContent =
3554         queueFamily.supportsProtected() && mFeatures.supportsProtectedMemory.enabled;
3555 
3556     uint32_t queueCount = std::min(queueFamily.getDeviceQueueCount(),
3557                                    static_cast<uint32_t>(egl::ContextPriority::EnumCount));
3558 
3559     uint32_t queueCreateInfoCount              = 1;
3560     VkDeviceQueueCreateInfo queueCreateInfo[1] = {};
3561     queueCreateInfo[0].sType                   = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
3562     queueCreateInfo[0].flags = enableProtectedContent ? VK_DEVICE_QUEUE_CREATE_PROTECTED_BIT : 0;
3563     queueCreateInfo[0].queueFamilyIndex = queueFamilyIndex;
3564     queueCreateInfo[0].queueCount       = queueCount;
3565     queueCreateInfo[0].pQueuePriorities = vk::QueueFamily::kQueuePriorities;
3566 
3567     // Setup device initialization struct
3568     VkDeviceCreateInfo createInfo    = {};
3569     createInfo.sType                 = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
3570     createInfo.flags                 = 0;
3571     createInfo.queueCreateInfoCount  = queueCreateInfoCount;
3572     createInfo.pQueueCreateInfos     = queueCreateInfo;
3573     createInfo.enabledLayerCount     = static_cast<uint32_t>(mEnabledDeviceLayerNames.size());
3574     createInfo.ppEnabledLayerNames   = mEnabledDeviceLayerNames.data();
3575     createInfo.enabledExtensionCount = static_cast<uint32_t>(mEnabledDeviceExtensions.size());
3576     createInfo.ppEnabledExtensionNames =
3577         mEnabledDeviceExtensions.empty() ? nullptr : mEnabledDeviceExtensions.data();
3578     mEnabledDeviceExtensions.push_back(nullptr);
3579 
3580     // Enable core features without assuming VkPhysicalDeviceFeatures2KHR is accepted in the
3581     // pNext chain of VkDeviceCreateInfo.
3582     createInfo.pEnabledFeatures = &mEnabledFeatures.features;
3583 
3584     // Append the feature structs chain to the end of createInfo structs chain.
3585     if (mEnabledFeatures.pNext)
3586     {
3587         vk::AppendToPNextChain(&createInfo, mEnabledFeatures.pNext);
3588     }
3589 
3590     if (mFeatures.logMemoryReportCallbacks.enabled || mFeatures.logMemoryReportStats.enabled)
3591     {
3592         ASSERT(mMemoryReportFeatures.deviceMemoryReport);
3593 
3594         mMemoryReportCallback       = {};
3595         mMemoryReportCallback.sType = VK_STRUCTURE_TYPE_DEVICE_DEVICE_MEMORY_REPORT_CREATE_INFO_EXT;
3596         mMemoryReportCallback.pfnUserCallback = &MemoryReportCallback;
3597         mMemoryReportCallback.pUserData       = this;
3598         vk::AddToPNextChain(&createInfo, &mMemoryReportCallback);
3599     }
3600 
3601     // Create the list of expected VVL messages to suppress.  Done before creating the device, as it
3602     // may also generate messages.
3603     initializeValidationMessageSuppressions();
3604 
3605     ANGLE_VK_TRY(context, vkCreateDevice(mPhysicalDevice, &createInfo, nullptr, &mDevice));
3606 #if defined(ANGLE_SHARED_LIBVULKAN)
3607     // Load volk if we are loading dynamically
3608     volkLoadDevice(mDevice);
3609 #endif  // defined(ANGLE_SHARED_LIBVULKAN)
3610 
3611     initDeviceExtensionEntryPoints();
3612 
3613     ANGLE_TRY(mCommandQueue.init(context, queueFamily, enableProtectedContent, queueCount));
3614     ANGLE_TRY(mCommandProcessor.init());
3615 
3616     if (mFeatures.forceMaxUniformBufferSize16KB.enabled)
3617     {
3618         mDefaultUniformBufferSize = kMinDefaultUniformBufferSize;
3619     }
3620     // Cap it with the driver limit
3621     mDefaultUniformBufferSize = std::min(
3622         mDefaultUniformBufferSize, getPhysicalDeviceProperties().limits.maxUniformBufferRange);
3623 
3624     // Initialize the vulkan pipeline cache.
3625     ANGLE_TRY(ensurePipelineCacheInitialized(context));
3626 
3627     // Track the set of supported pipeline stages.  This is used when issuing image layout
3628     // transitions that cover many stages (such as AllGraphicsReadOnly) to mask out unsupported
3629     // stages, which avoids enumerating every possible combination of stages in the layouts.
3630     VkPipelineStageFlags unsupportedStages = 0;
3631     mSupportedVulkanShaderStageMask =
3632         VK_SHADER_STAGE_VERTEX_BIT | VK_SHADER_STAGE_FRAGMENT_BIT | VK_SHADER_STAGE_COMPUTE_BIT;
3633     mSupportedBufferWritePipelineStageMask =
3634         VK_PIPELINE_STAGE_TRANSFER_BIT | VK_PIPELINE_STAGE_VERTEX_SHADER_BIT |
3635         VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;
3636 
3637     if (!mPhysicalDeviceFeatures.tessellationShader)
3638     {
3639         unsupportedStages |= VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT |
3640                              VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT;
3641     }
3642     else
3643     {
3644         mSupportedVulkanShaderStageMask |=
3645             VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT | VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT;
3646         mSupportedBufferWritePipelineStageMask |=
3647             VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT |
3648             VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT;
3649     }
3650     if (!mPhysicalDeviceFeatures.geometryShader)
3651     {
3652         unsupportedStages |= VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT;
3653     }
3654     else
3655     {
3656         mSupportedVulkanShaderStageMask |= VK_SHADER_STAGE_GEOMETRY_BIT;
3657         mSupportedBufferWritePipelineStageMask |= VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT;
3658     }
3659 
3660     if (getFeatures().supportsTransformFeedbackExtension.enabled)
3661     {
3662         mSupportedBufferWritePipelineStageMask |= VK_PIPELINE_STAGE_TRANSFORM_FEEDBACK_BIT_EXT;
3663     }
3664 
3665     // Initialize the barrierData tables by removing unsupported pipeline stage bits
3666     InitializeEventAndPipelineStagesMap(&mEventStageAndPipelineStageFlagsMap, ~unsupportedStages);
3667     InitializeImageLayoutAndMemoryBarrierDataMap(&mImageLayoutAndMemoryBarrierDataMap,
3668                                                  ~unsupportedStages);
3669     // mEventStageAndPipelineStageFlagsMap supposedly should match the value in dstStageMask of
3670     // mImageLayoutAndMemoryBarrierData
3671     ASSERT(EventAndPipelineBarrierHaveMatchingStageFlags(mEventStageAndPipelineStageFlagsMap,
3672                                                          mImageLayoutAndMemoryBarrierDataMap));
3673 
3674     ANGLE_TRY(initializeMemoryAllocator(context));
3675 
3676     // Log the memory heap stats when the device has been initialized (when debugging).
3677     mMemoryAllocationTracker.onDeviceInit();
3678 
3679     return angle::Result::Continue;
3680 }
3681 
calculatePendingGarbageSizeLimit()3682 void Renderer::calculatePendingGarbageSizeLimit()
3683 {
3684     // To find the threshold, we want the memory heap that has the largest size among other heaps.
3685     VkPhysicalDeviceMemoryProperties memoryProperties;
3686     vkGetPhysicalDeviceMemoryProperties(mPhysicalDevice, &memoryProperties);
3687     ASSERT(memoryProperties.memoryHeapCount > 0);
3688 
3689     VkDeviceSize maxHeapSize = memoryProperties.memoryHeaps[0].size;
3690     for (size_t i = 0; i < memoryProperties.memoryHeapCount; i++)
3691     {
3692         VkDeviceSize heapSize = memoryProperties.memoryHeaps[i].size;
3693         if (maxHeapSize < heapSize)
3694         {
3695             maxHeapSize = heapSize;
3696         }
3697     }
3698 
3699     // We set the limit to a portion of the heap size we found.
3700     constexpr float kGarbageSizeLimitCoefficient = 0.2f;
3701     mPendingGarbageSizeLimit =
3702         static_cast<VkDeviceSize>(maxHeapSize * kGarbageSizeLimitCoefficient);
3703 }
3704 
initializeValidationMessageSuppressions()3705 void Renderer::initializeValidationMessageSuppressions()
3706 {
3707     // Build the list of validation errors that are currently expected and should be skipped.
3708     mSkippedValidationMessages.insert(mSkippedValidationMessages.end(), kSkippedMessages,
3709                                       kSkippedMessages + ArraySize(kSkippedMessages));
3710     if (!getFeatures().supportsPrimitiveTopologyListRestart.enabled)
3711     {
3712         mSkippedValidationMessages.insert(
3713             mSkippedValidationMessages.end(), kNoListRestartSkippedMessages,
3714             kNoListRestartSkippedMessages + ArraySize(kNoListRestartSkippedMessages));
3715     }
3716 
3717     if (getFeatures().useVkEventForImageBarrier.enabled &&
3718         (!vk::OutsideRenderPassCommandBuffer::ExecutesInline() ||
3719          !vk::RenderPassCommandBuffer::ExecutesInline()))
3720     {
3721         mSkippedValidationMessages.insert(
3722             mSkippedValidationMessages.end(), kSkippedMessagesWithVulkanSecondaryCommandBuffer,
3723             kSkippedMessagesWithVulkanSecondaryCommandBuffer +
3724                 ArraySize(kSkippedMessagesWithVulkanSecondaryCommandBuffer));
3725     }
3726 
3727     // Build the list of syncval errors that are currently expected and should be skipped.
3728     mSkippedSyncvalMessages.insert(mSkippedSyncvalMessages.end(), kSkippedSyncvalMessages,
3729                                    kSkippedSyncvalMessages + ArraySize(kSkippedSyncvalMessages));
3730     if (!getFeatures().supportsRenderPassStoreOpNone.enabled &&
3731         !getFeatures().supportsRenderPassLoadStoreOpNone.enabled)
3732     {
3733         mSkippedSyncvalMessages.insert(mSkippedSyncvalMessages.end(),
3734                                        kSkippedSyncvalMessagesWithoutStoreOpNone,
3735                                        kSkippedSyncvalMessagesWithoutStoreOpNone +
3736                                            ArraySize(kSkippedSyncvalMessagesWithoutStoreOpNone));
3737     }
3738     if (!getFeatures().supportsRenderPassLoadStoreOpNone.enabled)
3739     {
3740         mSkippedSyncvalMessages.insert(
3741             mSkippedSyncvalMessages.end(), kSkippedSyncvalMessagesWithoutLoadStoreOpNone,
3742             kSkippedSyncvalMessagesWithoutLoadStoreOpNone +
3743                 ArraySize(kSkippedSyncvalMessagesWithoutLoadStoreOpNone));
3744     }
3745     if (getFeatures().enableMultisampledRenderToTexture.enabled &&
3746         !getFeatures().supportsMultisampledRenderToSingleSampled.enabled)
3747     {
3748         mSkippedSyncvalMessages.insert(mSkippedSyncvalMessages.end(),
3749                                        kSkippedSyncvalMessagesWithMSRTTEmulation,
3750                                        kSkippedSyncvalMessagesWithMSRTTEmulation +
3751                                            ArraySize(kSkippedSyncvalMessagesWithMSRTTEmulation));
3752     }
3753 }
3754 
checkQueueForSurfacePresent(vk::Context * context,VkSurfaceKHR surface,bool * supportedOut)3755 angle::Result Renderer::checkQueueForSurfacePresent(vk::Context *context,
3756                                                     VkSurfaceKHR surface,
3757                                                     bool *supportedOut)
3758 {
3759     // We've already initialized a device, and can't re-create it unless it's never been used.
3760     // If recreation is ever necessary, it should be able to deal with contexts currently running in
3761     // other threads using the existing queue.  For example, multiple contexts (not in a share
3762     // group) may be currently recording commands and rendering to pbuffers or using
3763     // EGL_KHR_surfaceless_context.
3764     ASSERT(mDevice != VK_NULL_HANDLE);
3765     ASSERT(mCurrentQueueFamilyIndex != std::numeric_limits<uint32_t>::max());
3766 
3767     // Check if the current device supports present on this surface.
3768     VkBool32 supportsPresent = VK_FALSE;
3769     ANGLE_VK_TRY(context,
3770                  vkGetPhysicalDeviceSurfaceSupportKHR(mPhysicalDevice, mCurrentQueueFamilyIndex,
3771                                                       surface, &supportsPresent));
3772 
3773     *supportedOut = supportsPresent == VK_TRUE;
3774     return angle::Result::Continue;
3775 }
3776 
getVendorString() const3777 std::string Renderer::getVendorString() const
3778 {
3779     return GetVendorString(mPhysicalDeviceProperties.vendorID);
3780 }
3781 
getRendererDescription() const3782 std::string Renderer::getRendererDescription() const
3783 {
3784     std::stringstream strstr;
3785 
3786     uint32_t apiVersion = mPhysicalDeviceProperties.apiVersion;
3787 
3788     strstr << "Vulkan ";
3789     strstr << VK_VERSION_MAJOR(apiVersion) << ".";
3790     strstr << VK_VERSION_MINOR(apiVersion) << ".";
3791     strstr << VK_VERSION_PATCH(apiVersion);
3792 
3793     strstr << " (";
3794 
3795     // In the case of NVIDIA, deviceName does not necessarily contain "NVIDIA". Add "NVIDIA" so that
3796     // Vulkan end2end tests can be selectively disabled on NVIDIA. TODO(jmadill): should not be
3797     // needed after http://anglebug.com/1874 is fixed and end2end_tests use more sophisticated
3798     // driver detection.
3799     if (mPhysicalDeviceProperties.vendorID == VENDOR_ID_NVIDIA)
3800     {
3801         strstr << GetVendorString(mPhysicalDeviceProperties.vendorID) << " ";
3802     }
3803 
3804     strstr << mPhysicalDeviceProperties.deviceName;
3805     strstr << " (" << gl::FmtHex(mPhysicalDeviceProperties.deviceID) << ")";
3806 
3807     strstr << ")";
3808 
3809     return strstr.str();
3810 }
3811 
getVersionString(bool includeFullVersion) const3812 std::string Renderer::getVersionString(bool includeFullVersion) const
3813 {
3814     std::stringstream strstr;
3815 
3816     uint32_t driverVersion = mPhysicalDeviceProperties.driverVersion;
3817     std::string driverName = std::string(mDriverProperties.driverName);
3818 
3819     if (!driverName.empty())
3820     {
3821         strstr << driverName;
3822     }
3823     else
3824     {
3825         strstr << GetVendorString(mPhysicalDeviceProperties.vendorID);
3826     }
3827 
3828     if (includeFullVersion)
3829     {
3830         strstr << "-";
3831 
3832         if (mPhysicalDeviceProperties.vendorID == VENDOR_ID_NVIDIA)
3833         {
3834             strstr << ANGLE_VK_VERSION_MAJOR_NVIDIA(driverVersion) << ".";
3835             strstr << ANGLE_VK_VERSION_MINOR_NVIDIA(driverVersion) << ".";
3836             strstr << ANGLE_VK_VERSION_SUB_MINOR_NVIDIA(driverVersion) << ".";
3837             strstr << ANGLE_VK_VERSION_PATCH_NVIDIA(driverVersion);
3838         }
3839         else if (mPhysicalDeviceProperties.vendorID == VENDOR_ID_INTEL && IsWindows())
3840         {
3841             strstr << ANGLE_VK_VERSION_MAJOR_WIN_INTEL(driverVersion) << ".";
3842             strstr << ANGLE_VK_VERSION_MINOR_WIN_INTEL(driverVersion);
3843         }
3844         // All other drivers use the Vulkan standard
3845         else
3846         {
3847             strstr << VK_VERSION_MAJOR(driverVersion) << ".";
3848             strstr << VK_VERSION_MINOR(driverVersion) << ".";
3849             strstr << VK_VERSION_PATCH(driverVersion);
3850         }
3851     }
3852 
3853     return strstr.str();
3854 }
3855 
// Determines the highest GLES version this Vulkan device can support, starting from ES 3.2
// and downgrading whenever a required Vulkan capability is missing.
gl::Version Renderer::getMaxSupportedESVersion() const
{
    // Current highest supported version
    gl::Version maxVersion = gl::Version(3, 2);

    // Early out without downgrading ES version if mock ICD enabled.
    // Mock ICD doesn't expose sufficient capabilities yet.
    // https://github.com/KhronosGroup/Vulkan-Tools/issues/84
    if (isMockICDEnabled())
    {
        return maxVersion;
    }

    // Limit to ES3.1 if there are any blockers for 3.2.
    ensureCapsInitialized();
    if (!mFeatures.exposeNonConformantExtensionsAndVersions.enabled &&
        !CanSupportGLES32(mNativeExtensions))
    {
        maxVersion = LimitVersionTo(maxVersion, {3, 1});
    }

    // Limit to ES3.0 if there are any blockers for 3.1.

    // ES3.1 requires at least one atomic counter buffer and four storage buffers in compute.
    // Atomic counter buffers are emulated with storage buffers.  For simplicity, we always support
    // either none or IMPLEMENTATION_MAX_ATOMIC_COUNTER_BUFFERS atomic counter buffers.  So if
    // Vulkan doesn't support at least that many storage buffers in compute, we don't support 3.1.
    const uint32_t kMinimumStorageBuffersForES31 =
        gl::limits::kMinimumComputeStorageBuffers +
        gl::IMPLEMENTATION_MAX_ATOMIC_COUNTER_BUFFER_BINDINGS;
    if (mPhysicalDeviceProperties.limits.maxPerStageDescriptorStorageBuffers <
        kMinimumStorageBuffersForES31)
    {
        maxVersion = LimitVersionTo(maxVersion, {3, 0});
    }

    // ES3.1 requires at least a maximum offset of at least 2047.
    // If the Vulkan implementation can't support that, we cannot support 3.1.
    if (mPhysicalDeviceProperties.limits.maxVertexInputAttributeOffset < 2047)
    {
        maxVersion = LimitVersionTo(maxVersion, {3, 0});
    }

    // SSO is in ES3.1 core, so we have to cap to ES3.0 for SSO disablement.
    if (mFeatures.disableSeparateShaderObjects.enabled)
    {
        maxVersion = LimitVersionTo(maxVersion, {3, 0});
    }

    // Limit to ES2.0 if there are any blockers for 3.0.
    // TODO: http://anglebug.com/3972 Limit to GLES 2.0 if flat shading can't be emulated

    // Multisample textures (ES3.1) and multisample renderbuffers (ES3.0) require the Vulkan driver
    // to support the standard sample locations (in order to pass dEQP tests that check these
    // locations).  If the Vulkan implementation can't support that, we cannot support 3.0/3.1.
    if (mPhysicalDeviceProperties.limits.standardSampleLocations != VK_TRUE)
    {
        maxVersion = LimitVersionTo(maxVersion, {2, 0});
    }

    // If independentBlend is not supported, we can't have a mix of has-alpha and emulated-alpha
    // render targets in a framebuffer.  We also cannot perform masked clears of multiple render
    // targets.
    if (!mPhysicalDeviceFeatures.independentBlend)
    {
        maxVersion = LimitVersionTo(maxVersion, {2, 0});
    }

    // If the Vulkan transform feedback extension is not present, we use an emulation path that
    // requires the vertexPipelineStoresAndAtomics feature. Without the extension or this feature,
    // we can't currently support transform feedback.
    if (!vk::CanSupportTransformFeedbackExtension(mTransformFeedbackFeatures) &&
        !vk::CanSupportTransformFeedbackEmulation(mPhysicalDeviceFeatures))
    {
        maxVersion = LimitVersionTo(maxVersion, {2, 0});
    }

    // Limit to GLES 2.0 if maxPerStageDescriptorUniformBuffers is too low.
    // Table 6.31 MAX_VERTEX_UNIFORM_BLOCKS minimum value = 12
    // Table 6.32 MAX_FRAGMENT_UNIFORM_BLOCKS minimum value = 12
    // NOTE: We reserve some uniform buffers for emulation, so use the NativeCaps which takes this
    // into account, rather than the physical device maxPerStageDescriptorUniformBuffers limits.
    for (gl::ShaderType shaderType : gl::AllShaderTypes())
    {
        if (static_cast<GLuint>(getNativeCaps().maxShaderUniformBlocks[shaderType]) <
            gl::limits::kMinimumShaderUniformBlocks)
        {
            maxVersion = LimitVersionTo(maxVersion, {2, 0});
        }
    }

    // Limit to GLES 2.0 if maxVertexOutputComponents is too low.
    // Table 6.31 MAX VERTEX OUTPUT COMPONENTS minimum value = 64
    // NOTE: We reserve some vertex output components for emulation, so use the NativeCaps which
    // takes this into account, rather than the physical device maxVertexOutputComponents limits.
    if (static_cast<GLuint>(getNativeCaps().maxVertexOutputComponents) <
        gl::limits::kMinimumVertexOutputComponents)
    {
        maxVersion = LimitVersionTo(maxVersion, {2, 0});
    }

    return maxVersion;
}
3959 
getMaxConformantESVersion() const3960 gl::Version Renderer::getMaxConformantESVersion() const
3961 {
3962     const gl::Version maxSupportedESVersion = getMaxSupportedESVersion();
3963     const bool hasGeometryAndTessSupport =
3964         getNativeExtensions().geometryShaderAny() && getNativeExtensions().tessellationShaderAny();
3965 
3966     if (!hasGeometryAndTessSupport || !mFeatures.exposeNonConformantExtensionsAndVersions.enabled)
3967     {
3968         return LimitVersionTo(maxSupportedESVersion, {3, 1});
3969     }
3970 
3971     return maxSupportedESVersion;
3972 }
3973 
getDeviceVersion()3974 uint32_t Renderer::getDeviceVersion()
3975 {
3976     return mDeviceVersion == 0 ? mInstanceVersion : mDeviceVersion;
3977 }
3978 
queryAndCacheFragmentShadingRates()3979 void Renderer::queryAndCacheFragmentShadingRates()
3980 {
3981     // Init required functions
3982 #if !defined(ANGLE_SHARED_LIBVULKAN)
3983     InitFragmentShadingRateKHRInstanceFunction(mInstance);
3984 #endif  // !defined(ANGLE_SHARED_LIBVULKAN)
3985     ASSERT(vkGetPhysicalDeviceFragmentShadingRatesKHR);
3986 
3987     // Query number of supported shading rates first
3988     uint32_t shadingRatesCount = 0;
3989     VkResult result =
3990         vkGetPhysicalDeviceFragmentShadingRatesKHR(mPhysicalDevice, &shadingRatesCount, nullptr);
3991     ASSERT(result == VK_SUCCESS);
3992     ASSERT(shadingRatesCount > 0);
3993 
3994     std::vector<VkPhysicalDeviceFragmentShadingRateKHR> shadingRates(
3995         shadingRatesCount,
3996         {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_KHR, nullptr, 0, {0, 0}});
3997 
3998     // Query supported shading rates
3999     result = vkGetPhysicalDeviceFragmentShadingRatesKHR(mPhysicalDevice, &shadingRatesCount,
4000                                                         shadingRates.data());
4001     ASSERT(result == VK_SUCCESS);
4002 
4003     // Cache supported fragment shading rates
4004     mSupportedFragmentShadingRates.reset();
4005     mSupportedFragmentShadingRateSampleCounts.fill(0u);
4006     for (const VkPhysicalDeviceFragmentShadingRateKHR &shadingRate : shadingRates)
4007     {
4008         if (shadingRate.sampleCounts == 0)
4009         {
4010             continue;
4011         }
4012         const gl::ShadingRate rate = GetShadingRateFromVkExtent(shadingRate.fragmentSize);
4013         mSupportedFragmentShadingRates.set(rate);
4014         mSupportedFragmentShadingRateSampleCounts[rate] = shadingRate.sampleCounts;
4015     }
4016 }
4017 
canSupportFragmentShadingRate() const4018 bool Renderer::canSupportFragmentShadingRate() const
4019 {
4020     // VK_KHR_create_renderpass2 is required for VK_KHR_fragment_shading_rate
4021     if (!mFeatures.supportsRenderpass2.enabled)
4022     {
4023         return false;
4024     }
4025 
4026     // Device needs to support VK_KHR_fragment_shading_rate and specifically
4027     // pipeline fragment shading rate.
4028     if (mFragmentShadingRateFeatures.pipelineFragmentShadingRate != VK_TRUE)
4029     {
4030         return false;
4031     }
4032 
4033     ASSERT(mSupportedFragmentShadingRates.any());
4034 
4035     // To implement GL_QCOM_shading_rate extension the Vulkan ICD needs to support at least the
4036     // following shading rates -
4037     //     {1, 1}
4038     //     {1, 2}
4039     //     {2, 1}
4040     //     {2, 2}
4041     return mSupportedFragmentShadingRates.test(gl::ShadingRate::_1x1) &&
4042            mSupportedFragmentShadingRates.test(gl::ShadingRate::_1x2) &&
4043            mSupportedFragmentShadingRates.test(gl::ShadingRate::_2x1) &&
4044            mSupportedFragmentShadingRates.test(gl::ShadingRate::_2x2);
4045 }
4046 
canSupportFoveatedRendering() const4047 bool Renderer::canSupportFoveatedRendering() const
4048 {
4049     // Device needs to support attachment fragment shading rate.
4050     if (mFragmentShadingRateFeatures.attachmentFragmentShadingRate != VK_TRUE)
4051     {
4052         return false;
4053     }
4054 
4055     ASSERT(mSupportedFragmentShadingRates.any());
4056     ASSERT(!mSupportedFragmentShadingRateSampleCounts.empty());
4057 
4058     // To implement QCOM foveated rendering extensions the Vulkan ICD needs to support all sample
4059     // count bits listed in VkPhysicalDeviceLimits::framebufferColorSampleCounts for these shading
4060     // rates -
4061     //     {1, 1}
4062     //     {1, 2}
4063     //     {2, 1}
4064     //     {2, 2}
4065     VkSampleCountFlags framebufferSampleCounts =
4066         getPhysicalDeviceProperties().limits.framebufferColorSampleCounts &
4067         vk_gl::kSupportedSampleCounts;
4068 
4069     return (mSupportedFragmentShadingRateSampleCounts[gl::ShadingRate::_1x1] &
4070             framebufferSampleCounts) == framebufferSampleCounts &&
4071            (mSupportedFragmentShadingRateSampleCounts[gl::ShadingRate::_1x2] &
4072             framebufferSampleCounts) == framebufferSampleCounts &&
4073            (mSupportedFragmentShadingRateSampleCounts[gl::ShadingRate::_2x1] &
4074             framebufferSampleCounts) == framebufferSampleCounts &&
4075            (mSupportedFragmentShadingRateSampleCounts[gl::ShadingRate::_2x2] &
4076             framebufferSampleCounts) == framebufferSampleCounts;
4077 }
4078 
canPreferDeviceLocalMemoryHostVisible(VkPhysicalDeviceType deviceType)4079 bool Renderer::canPreferDeviceLocalMemoryHostVisible(VkPhysicalDeviceType deviceType)
4080 {
4081     if (deviceType == VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU)
4082     {
4083         const vk::MemoryProperties &memoryProperties = getMemoryProperties();
4084         static constexpr VkMemoryPropertyFlags kHostVisiableDeviceLocalFlags =
4085             VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT | VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
4086         VkDeviceSize minHostVisiableDeviceLocalHeapSize = std::numeric_limits<VkDeviceSize>::max();
4087         VkDeviceSize maxDeviceLocalHeapSize             = 0;
4088         for (uint32_t i = 0; i < memoryProperties.getMemoryTypeCount(); ++i)
4089         {
4090             if ((memoryProperties.getMemoryType(i).propertyFlags &
4091                  VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
4092             {
4093                 maxDeviceLocalHeapSize =
4094                     std::max(maxDeviceLocalHeapSize, memoryProperties.getHeapSizeForMemoryType(i));
4095             }
4096             if ((memoryProperties.getMemoryType(i).propertyFlags & kHostVisiableDeviceLocalFlags) ==
4097                 kHostVisiableDeviceLocalFlags)
4098             {
4099                 minHostVisiableDeviceLocalHeapSize =
4100                     std::min(minHostVisiableDeviceLocalHeapSize,
4101                              memoryProperties.getHeapSizeForMemoryType(i));
4102             }
4103         }
4104         return minHostVisiableDeviceLocalHeapSize != std::numeric_limits<VkDeviceSize>::max() &&
4105                minHostVisiableDeviceLocalHeapSize >=
4106                    static_cast<VkDeviceSize>(maxDeviceLocalHeapSize * 0.8);
4107     }
4108     return deviceType != VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU;
4109 }
4110 
initFeatures(const vk::ExtensionNameList & deviceExtensionNames,const angle::FeatureOverrides & featureOverrides,UseVulkanSwapchain useVulkanSwapchain,angle::NativeWindowSystem nativeWindowSystem)4111 void Renderer::initFeatures(const vk::ExtensionNameList &deviceExtensionNames,
4112                             const angle::FeatureOverrides &featureOverrides,
4113                             UseVulkanSwapchain useVulkanSwapchain,
4114                             angle::NativeWindowSystem nativeWindowSystem)
4115 {
4116     ApplyFeatureOverrides(&mFeatures, featureOverrides);
4117 
4118     if (featureOverrides.allDisabled)
4119     {
4120         return;
4121     }
4122 
4123     const bool isAMD      = IsAMD(mPhysicalDeviceProperties.vendorID);
4124     const bool isApple    = IsAppleGPU(mPhysicalDeviceProperties.vendorID);
4125     const bool isARM      = IsARM(mPhysicalDeviceProperties.vendorID);
4126     const bool isIntel    = IsIntel(mPhysicalDeviceProperties.vendorID);
4127     const bool isNvidia   = IsNvidia(mPhysicalDeviceProperties.vendorID);
4128     const bool isPowerVR  = IsPowerVR(mPhysicalDeviceProperties.vendorID);
4129     const bool isQualcomm = IsQualcomm(mPhysicalDeviceProperties.vendorID);
4130     const bool isBroadcom = IsBroadcom(mPhysicalDeviceProperties.vendorID);
4131     const bool isSamsung  = IsSamsung(mPhysicalDeviceProperties.vendorID);
4132     const bool isSwiftShader =
4133         IsSwiftshader(mPhysicalDeviceProperties.vendorID, mPhysicalDeviceProperties.deviceID);
4134 
4135     const bool isGalaxyS23 =
4136         IsGalaxyS23(mPhysicalDeviceProperties.vendorID, mPhysicalDeviceProperties.deviceID);
4137 
4138     // Distinguish between the open source and proprietary Qualcomm drivers
4139     const bool isQualcommOpenSource =
4140         IsQualcommOpenSource(mPhysicalDeviceProperties.vendorID, mDriverProperties.driverID,
4141                              mPhysicalDeviceProperties.deviceName);
4142     const bool isQualcommProprietary = isQualcomm && !isQualcommOpenSource;
4143 
4144     // Lacking other explicit ways to tell if mali GPU is job manager based or command stream front
4145     // end based, we use maxDrawIndirectCount as equivalent since all JM based has
4146     // maxDrawIndirectCount==1 and all CSF based has maxDrawIndirectCount>1.
4147     bool isMaliJobManagerBasedGPU =
4148         isARM && getPhysicalDeviceProperties().limits.maxDrawIndirectCount <= 1;
4149     // Parse the ARM driver version to be readable/comparable
4150     const ARMDriverVersion armDriverVersion =
4151         ParseARMVulkanDriverVersion(mPhysicalDeviceProperties.driverVersion);
4152 
4153     // Parse the Qualcomm driver version.
4154     const QualcommDriverVersion qualcommDriverVersion =
4155         ParseQualcommVulkanDriverVersion(mPhysicalDeviceProperties.driverVersion);
4156 
4157     // Parse the Intel driver version. (Currently it only supports the Windows driver.)
4158     const IntelDriverVersion intelDriverVersion =
4159         ParseIntelWindowsDriverVersion(mPhysicalDeviceProperties.driverVersion);
4160 
4161     // Distinguish between the mesa and proprietary drivers
4162     const bool isRADV = IsRADV(mPhysicalDeviceProperties.vendorID, mDriverProperties.driverID,
4163                                mPhysicalDeviceProperties.deviceName);
4164 
4165     angle::VersionInfo nvidiaVersion;
4166     if (isNvidia)
4167     {
4168         nvidiaVersion = angle::ParseNvidiaDriverVersion(mPhysicalDeviceProperties.driverVersion);
4169     }
4170 
4171     angle::VersionInfo mesaVersion;
4172     if (isIntel && IsLinux())
4173     {
4174         mesaVersion = angle::ParseMesaDriverVersion(mPhysicalDeviceProperties.driverVersion);
4175     }
4176 
4177     // Classify devices based on general architecture:
4178     //
4179     // - IMR (Immediate-Mode Rendering) devices generally progress through draw calls once and use
4180     //   the main GPU memory (accessed through caches) to store intermediate rendering results.
4181     // - TBR (Tile-Based Rendering) devices issue a pre-rendering geometry pass, then run through
4182     //   draw calls once per tile and store intermediate rendering results on the tile cache.
4183     //
4184     // Due to these key architectural differences, some operations improve performance on one while
4185     // deteriorating performance on the other.  ANGLE will accordingly make some decisions based on
4186     // the device architecture for optimal performance on both.
4187     const bool isImmediateModeRenderer = isNvidia || isAMD || isIntel || isSamsung || isSwiftShader;
4188     const bool isTileBasedRenderer     = isARM || isPowerVR || isQualcomm || isBroadcom || isApple;
4189 
4190     // Make sure all known architectures are accounted for.
4191     if (!isImmediateModeRenderer && !isTileBasedRenderer && !isMockICDEnabled())
4192     {
4193         WARN() << "Unknown GPU architecture";
4194     }
4195 
4196     ANGLE_FEATURE_CONDITION(&mFeatures, appendAliasedMemoryDecorations, true);
4197 
4198     ANGLE_FEATURE_CONDITION(
4199         &mFeatures, supportsSharedPresentableImageExtension,
4200         ExtensionFound(VK_KHR_SHARED_PRESENTABLE_IMAGE_EXTENSION_NAME, deviceExtensionNames));
4201 
4202     ANGLE_FEATURE_CONDITION(&mFeatures, supportsGetMemoryRequirements2, true);
4203 
4204     ANGLE_FEATURE_CONDITION(&mFeatures, supportsBindMemory2, true);
4205 
4206     ANGLE_FEATURE_CONDITION(&mFeatures, bresenhamLineRasterization,
4207                             mLineRasterizationFeatures.bresenhamLines == VK_TRUE);
4208 
4209     ANGLE_FEATURE_CONDITION(&mFeatures, provokingVertex,
4210                             mProvokingVertexFeatures.provokingVertexLast == VK_TRUE);
4211 
4212     // http://b/208458772. ARM driver supports this protected memory extension but we are seeing
4213     // excessive load/store unit activity when this extension is enabled, even if not been used.
4214     // Disable this extension on older ARM platforms that don't support
4215     // VK_EXT_pipeline_protected_access.
4216     // http://anglebug.com/7714
4217     ANGLE_FEATURE_CONDITION(
4218         &mFeatures, supportsProtectedMemory,
4219         mProtectedMemoryFeatures.protectedMemory == VK_TRUE &&
4220             (!isARM || mPipelineProtectedAccessFeatures.pipelineProtectedAccess == VK_TRUE));
4221 
4222     ANGLE_FEATURE_CONDITION(&mFeatures, supportsHostQueryReset,
4223                             mHostQueryResetFeatures.hostQueryReset == VK_TRUE);
4224     // Avoid any inefficiency that may be caused by host image copy by default.  To be experimented
4225     // with to see on which hardware VkHostImageCopyDevicePerformanceQueryEXT::optimalDeviceAccess
4226     // is really performing as well as
4227     // VkHostImageCopyDevicePerformanceQueryEXT::identicalMemoryLayout.
4228     ANGLE_FEATURE_CONDITION(&mFeatures, allowHostImageCopyDespiteNonIdenticalLayout, false);
4229 
4230     // VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL and
4231     // VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL are introduced by
4232     // VK_KHR_maintenance2 and promoted to Vulkan 1.1.
4233     ANGLE_FEATURE_CONDITION(&mFeatures, supportsMixedReadWriteDepthStencilLayouts, true);
4234 
4235     // VK_EXT_pipeline_creation_feedback is promoted to core in Vulkan 1.3.
4236     ANGLE_FEATURE_CONDITION(
4237         &mFeatures, supportsPipelineCreationFeedback,
4238         ExtensionFound(VK_EXT_PIPELINE_CREATION_FEEDBACK_EXTENSION_NAME, deviceExtensionNames));
4239 
4240     // Note: Protected Swapchains is not determined until we have a VkSurface to query.
4241     // So here vendors should indicate support so that protected_content extension
4242     // is enabled.
4243     ANGLE_FEATURE_CONDITION(&mFeatures, supportsSurfaceProtectedSwapchains, IsAndroid());
4244 
4245     // Work around incorrect NVIDIA point size range clamping.
4246     // http://anglebug.com/2970#c10
4247     // Clamp if driver version is:
4248     //   < 430 on Windows
4249     //   < 421 otherwise
4250     ANGLE_FEATURE_CONDITION(&mFeatures, clampPointSize,
4251                             isNvidia && nvidiaVersion.major < uint32_t(IsWindows() ? 430 : 421));
4252 
4253     // Affecting Nvidia drivers 535 through 551.
4254     ANGLE_FEATURE_CONDITION(&mFeatures, avoidOpSelectWithMismatchingRelaxedPrecision,
4255                             isNvidia && (nvidiaVersion.major >= 535 && nvidiaVersion.major <= 551));
4256 
4257     // Vulkan implementations are not required to clamp gl_FragDepth to [0, 1] by default.
4258     ANGLE_FEATURE_CONDITION(&mFeatures, supportsDepthClampZeroOne,
4259                             mDepthClampZeroOneFeatures.depthClampZeroOne == VK_TRUE);
4260 
4261     ANGLE_FEATURE_CONDITION(&mFeatures, clampFragDepth,
4262                             isNvidia && !mFeatures.supportsDepthClampZeroOne.enabled);
4263 
4264     ANGLE_FEATURE_CONDITION(
4265         &mFeatures, supportsRenderpass2,
4266         ExtensionFound(VK_KHR_CREATE_RENDERPASS_2_EXTENSION_NAME, deviceExtensionNames));
4267 
4268     ANGLE_FEATURE_CONDITION(
4269         &mFeatures, supportsIncrementalPresent,
4270         ExtensionFound(VK_KHR_INCREMENTAL_PRESENT_EXTENSION_NAME, deviceExtensionNames));
4271 
4272 #if defined(ANGLE_PLATFORM_ANDROID)
4273     ANGLE_FEATURE_CONDITION(
4274         &mFeatures, supportsAndroidHardwareBuffer,
4275         IsAndroid() &&
4276             ExtensionFound(VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME,
4277                            deviceExtensionNames) &&
4278             ExtensionFound(VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME, deviceExtensionNames));
4279 #endif
4280 
4281 #if defined(ANGLE_PLATFORM_GGP)
4282     ANGLE_FEATURE_CONDITION(
4283         &mFeatures, supportsGGPFrameToken,
4284         ExtensionFound(VK_GGP_FRAME_TOKEN_EXTENSION_NAME, deviceExtensionNames));
4285 #endif
4286 
4287     ANGLE_FEATURE_CONDITION(
4288         &mFeatures, supportsExternalMemoryFd,
4289         ExtensionFound(VK_KHR_EXTERNAL_MEMORY_FD_EXTENSION_NAME, deviceExtensionNames));
4290 
4291 #if defined(ANGLE_PLATFORM_WINDOWS)
4292     ANGLE_FEATURE_CONDITION(
4293         &mFeatures, supportsFullScreenExclusive,
4294         ExtensionFound(VK_EXT_FULL_SCREEN_EXCLUSIVE_EXTENSION_NAME, deviceExtensionNames));
4295 
4296     // On Windows+AMD, drivers before version 0x800106 (2.0.262) would
4297     // implicitly enable VK_EXT_full_screen_exclusive and start returning
4298     // extension-specific error codes in swapchain functions. Since the
4299     // extension was not enabled by ANGLE, it was impossible to handle these
4300     // error codes correctly. On these earlier drivers, we want to explicitly
4301     // enable the extension and opt out of it to avoid seeing those error codes
4302     // entirely.
4303     ANGLE_FEATURE_CONDITION(&mFeatures, forceDisableFullScreenExclusive,
4304                             isAMD && mPhysicalDeviceProperties.driverVersion < 0x800106);
4305 #endif
4306 
4307     ANGLE_FEATURE_CONDITION(
4308         &mFeatures, supportsExternalMemoryFuchsia,
4309         ExtensionFound(VK_FUCHSIA_EXTERNAL_MEMORY_EXTENSION_NAME, deviceExtensionNames));
4310 
4311     ANGLE_FEATURE_CONDITION(
4312         &mFeatures, supportsExternalSemaphoreFd,
4313         ExtensionFound(VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME, deviceExtensionNames));
4314 
4315     ANGLE_FEATURE_CONDITION(
4316         &mFeatures, supportsExternalSemaphoreFuchsia,
4317         ExtensionFound(VK_FUCHSIA_EXTERNAL_SEMAPHORE_EXTENSION_NAME, deviceExtensionNames));
4318 
4319     ANGLE_FEATURE_CONDITION(
4320         &mFeatures, supportsExternalFenceFd,
4321         ExtensionFound(VK_KHR_EXTERNAL_FENCE_FD_EXTENSION_NAME, deviceExtensionNames));
4322 
4323 #if defined(ANGLE_PLATFORM_ANDROID) || defined(ANGLE_PLATFORM_LINUX)
4324     if (mFeatures.supportsExternalFenceCapabilities.enabled &&
4325         mFeatures.supportsExternalSemaphoreCapabilities.enabled)
4326     {
4327         VkExternalFenceProperties externalFenceProperties = {};
4328         externalFenceProperties.sType = VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES;
4329 
4330         VkPhysicalDeviceExternalFenceInfo externalFenceInfo = {};
4331         externalFenceInfo.sType      = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO;
4332         externalFenceInfo.handleType = VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT_KHR;
4333 
4334         vkGetPhysicalDeviceExternalFenceProperties(mPhysicalDevice, &externalFenceInfo,
4335                                                    &externalFenceProperties);
4336 
4337         VkExternalSemaphoreProperties externalSemaphoreProperties = {};
4338         externalSemaphoreProperties.sType = VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES;
4339 
4340         VkPhysicalDeviceExternalSemaphoreInfo externalSemaphoreInfo = {};
4341         externalSemaphoreInfo.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO;
4342         externalSemaphoreInfo.handleType = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT_KHR;
4343 
4344         vkGetPhysicalDeviceExternalSemaphoreProperties(mPhysicalDevice, &externalSemaphoreInfo,
4345                                                        &externalSemaphoreProperties);
4346 
4347         ANGLE_FEATURE_CONDITION(
4348             &mFeatures, supportsAndroidNativeFenceSync,
4349             (mFeatures.supportsExternalFenceFd.enabled &&
4350              FencePropertiesCompatibleWithAndroid(externalFenceProperties) &&
4351              mFeatures.supportsExternalSemaphoreFd.enabled &&
4352              SemaphorePropertiesCompatibleWithAndroid(externalSemaphoreProperties)));
4353     }
4354     else
4355     {
4356         ANGLE_FEATURE_CONDITION(&mFeatures, supportsAndroidNativeFenceSync,
4357                                 (mFeatures.supportsExternalFenceFd.enabled &&
4358                                  mFeatures.supportsExternalSemaphoreFd.enabled));
4359     }
4360 #endif  // defined(ANGLE_PLATFORM_ANDROID) || defined(ANGLE_PLATFORM_LINUX)
4361 
4362     // Disabled on SwiftShader due to http://crbug.com/40942995
4363     ANGLE_FEATURE_CONDITION(
4364         &mFeatures, supportsShaderStencilExport,
4365         ExtensionFound(VK_EXT_SHADER_STENCIL_EXPORT_EXTENSION_NAME, deviceExtensionNames) &&
4366             !isSwiftShader);
4367 
4368     ANGLE_FEATURE_CONDITION(
4369         &mFeatures, supportsRenderPassLoadStoreOpNone,
4370         ExtensionFound(VK_EXT_LOAD_STORE_OP_NONE_EXTENSION_NAME, deviceExtensionNames));
4371 
4372     ANGLE_FEATURE_CONDITION(&mFeatures, disallowMixedDepthStencilLoadOpNoneAndLoad,
4373                             isARM && armDriverVersion < ARMDriverVersion(38, 1, 0));
4374 
4375     ANGLE_FEATURE_CONDITION(
4376         &mFeatures, supportsRenderPassStoreOpNone,
4377         !mFeatures.supportsRenderPassLoadStoreOpNone.enabled &&
4378             ExtensionFound(VK_QCOM_RENDER_PASS_STORE_OPS_EXTENSION_NAME, deviceExtensionNames));
4379 
4380     ANGLE_FEATURE_CONDITION(&mFeatures, supportsDepthClipControl,
4381                             mDepthClipControlFeatures.depthClipControl == VK_TRUE);
4382 
4383     ANGLE_FEATURE_CONDITION(
4384         &mFeatures, supportsPrimitiveTopologyListRestart,
4385         mPrimitiveTopologyListRestartFeatures.primitiveTopologyListRestart == VK_TRUE);
4386 
4387     ANGLE_FEATURE_CONDITION(
4388         &mFeatures, supportsBlendOperationAdvanced,
4389         ExtensionFound(VK_EXT_BLEND_OPERATION_ADVANCED_EXTENSION_NAME, deviceExtensionNames));
4390 
4391     ANGLE_FEATURE_CONDITION(
4392         &mFeatures, supportsFormatFeatureFlags2,
4393         ExtensionFound(VK_KHR_FORMAT_FEATURE_FLAGS_2_EXTENSION_NAME, deviceExtensionNames));
4394 
4395     ANGLE_FEATURE_CONDITION(&mFeatures, supportsTransformFeedbackExtension,
4396                             vk::CanSupportTransformFeedbackExtension(mTransformFeedbackFeatures));
4397 
4398     ANGLE_FEATURE_CONDITION(&mFeatures, supportsGeometryStreamsCapability,
4399                             mFeatures.supportsTransformFeedbackExtension.enabled &&
4400                                 mTransformFeedbackFeatures.geometryStreams == VK_TRUE);
4401 
4402     ANGLE_FEATURE_CONDITION(
4403         &mFeatures, supportsPrimitivesGeneratedQuery,
4404         mFeatures.supportsTransformFeedbackExtension.enabled &&
4405             mPrimitivesGeneratedQueryFeatures.primitivesGeneratedQuery == VK_TRUE);
4406 
4407     ANGLE_FEATURE_CONDITION(&mFeatures, emulateTransformFeedback,
4408                             !mFeatures.supportsTransformFeedbackExtension.enabled &&
4409                                 vk::CanSupportTransformFeedbackEmulation(mPhysicalDeviceFeatures));
4410 
4411     ANGLE_FEATURE_CONDITION(&mFeatures, supportsIndexTypeUint8,
4412                             mIndexTypeUint8Features.indexTypeUint8 == VK_TRUE);
4413 
4414     ANGLE_FEATURE_CONDITION(&mFeatures, supportsDepthStencilResolve,
4415                             mFeatures.supportsRenderpass2.enabled &&
4416                                 mDepthStencilResolveProperties.supportedDepthResolveModes != 0);
4417     ANGLE_FEATURE_CONDITION(&mFeatures, supportsDepthStencilIndependentResolveNone,
4418                             mFeatures.supportsDepthStencilResolve.enabled &&
4419                                 mDepthStencilResolveProperties.independentResolveNone);
4420     // Disable optimizing depth/stencil resolve through glBlitFramebuffer for buggy drivers:
4421     //
4422     // - Nvidia: http://anglebug.com/8658
4423     // - Pixel4: http://anglebug.com/8659
4424     //
4425     ANGLE_FEATURE_CONDITION(&mFeatures, disableDepthStencilResolveThroughAttachment,
4426                             isNvidia || isQualcommProprietary);
4427 
4428     ANGLE_FEATURE_CONDITION(
4429         &mFeatures, supportsMultisampledRenderToSingleSampled,
4430         mFeatures.supportsRenderpass2.enabled && mFeatures.supportsDepthStencilResolve.enabled &&
4431             mMultisampledRenderToSingleSampledFeatures.multisampledRenderToSingleSampled ==
4432                 VK_TRUE);
4433 
4434     // Preferring the MSRTSS flag is for texture initialization. If the MSRTSS is not used at first,
4435     // it will be used (if available) when recreating the image if it is bound to an MSRTT
4436     // framebuffer.
4437     ANGLE_FEATURE_CONDITION(&mFeatures, preferMSRTSSFlagByDefault, isARM);
4438 
4439     ANGLE_FEATURE_CONDITION(&mFeatures, supportsImage2dViewOf3d,
4440                             mImage2dViewOf3dFeatures.image2DViewOf3D == VK_TRUE);
4441 
4442     // Note: sampler2DViewOf3D is only useful for supporting EGL_KHR_gl_texture_3D_image.  If the
4443     // VK_IMAGE_CREATE_2D_VIEW_COMPATIBLE_BIT_EXT added to 3D images measurable hurts sampling
4444     // performance, it might be better to remove support for EGL_KHR_gl_texture_3D_image in favor of
4445     // faster 3D images.
4446     ANGLE_FEATURE_CONDITION(&mFeatures, supportsSampler2dViewOf3d,
4447                             mFeatures.supportsImage2dViewOf3d.enabled &&
4448                                 mImage2dViewOf3dFeatures.sampler2DViewOf3D == VK_TRUE);
4449 
4450     ANGLE_FEATURE_CONDITION(&mFeatures, supportsMultiview, mMultiviewFeatures.multiview == VK_TRUE);
4451 
4452     // TODO: http://anglebug.com/5927 - drop dependency on customBorderColorWithoutFormat.
4453     ANGLE_FEATURE_CONDITION(
4454         &mFeatures, supportsCustomBorderColor,
4455         mCustomBorderColorFeatures.customBorderColors == VK_TRUE &&
4456             mCustomBorderColorFeatures.customBorderColorWithoutFormat == VK_TRUE);
4457 
4458     ANGLE_FEATURE_CONDITION(&mFeatures, supportsMultiDrawIndirect,
4459                             mPhysicalDeviceFeatures.multiDrawIndirect == VK_TRUE);
4460 
4461     ANGLE_FEATURE_CONDITION(&mFeatures, perFrameWindowSizeQuery,
4462                             IsAndroid() || isIntel || (IsWindows() && isAMD) || IsFuchsia() ||
4463                                 isSamsung ||
4464                                 nativeWindowSystem == angle::NativeWindowSystem::Wayland);
4465 
4466     ANGLE_FEATURE_CONDITION(&mFeatures, padBuffersToMaxVertexAttribStride, isAMD || isSamsung);
4467     mMaxVertexAttribStride = std::min(static_cast<uint32_t>(gl::limits::kMaxVertexAttribStride),
4468                                       mPhysicalDeviceProperties.limits.maxVertexInputBindingStride);
4469 
4470     ANGLE_FEATURE_CONDITION(&mFeatures, forceD16TexFilter, IsAndroid() && isQualcommProprietary);
4471 
4472     ANGLE_FEATURE_CONDITION(&mFeatures, disableFlippingBlitWithCommand,
4473                             IsAndroid() && isQualcommProprietary);
4474 
4475     // Allocation sanitization disabled by default because of a heaveyweight implementation
4476     // that can cause OOM and timeouts.
4477     ANGLE_FEATURE_CONDITION(&mFeatures, allocateNonZeroMemory, false);
4478 
4479     // ARM does buffer copy on geometry pipeline, which may create a GPU pipeline bubble that
4480     // prevents vertex shader to overlap with fragment shader on job manager based architecture. For
4481     // now we always choose CPU to do copy on ARM job manager based GPU.
4482     ANGLE_FEATURE_CONDITION(&mFeatures, preferCPUForBufferSubData, isMaliJobManagerBasedGPU);
4483 
4484     // On android, we usually are GPU limited, we try to use CPU to do data copy when other
4485     // conditions are the same. Set to zero will use GPU to do copy. This is subject to further
4486     // tuning for each platform https://issuetracker.google.com/201826021
4487     mMaxCopyBytesUsingCPUWhenPreservingBufferData =
4488         IsAndroid() ? std::numeric_limits<uint32_t>::max() : 0;
4489 
4490     ANGLE_FEATURE_CONDITION(&mFeatures, persistentlyMappedBuffers, true);
4491 
4492     ANGLE_FEATURE_CONDITION(&mFeatures, logMemoryReportCallbacks, false);
4493     ANGLE_FEATURE_CONDITION(&mFeatures, logMemoryReportStats, false);
4494 
4495     ANGLE_FEATURE_CONDITION(
4496         &mFeatures, supportsExternalMemoryDmaBufAndModifiers,
4497         ExtensionFound(VK_EXT_EXTERNAL_MEMORY_DMA_BUF_EXTENSION_NAME, deviceExtensionNames) &&
4498             ExtensionFound(VK_EXT_IMAGE_DRM_FORMAT_MODIFIER_EXTENSION_NAME, deviceExtensionNames));
4499 
4500     // Android pre-rotation support can be disabled.
4501     ANGLE_FEATURE_CONDITION(&mFeatures, enablePreRotateSurfaces, IsAndroid());
4502 
4503     // http://anglebug.com/3078
4504     // Precision qualifiers are disabled for Pixel 2 before the driver included relaxed precision.
4505     ANGLE_FEATURE_CONDITION(
4506         &mFeatures, enablePrecisionQualifiers,
4507         !(IsPixel2(mPhysicalDeviceProperties.vendorID, mPhysicalDeviceProperties.deviceID) &&
4508           (qualcommDriverVersion < QualcommDriverVersion(512, 490, 0))) &&
4509             !IsPixel4(mPhysicalDeviceProperties.vendorID, mPhysicalDeviceProperties.deviceID));
4510 
4511     // http://anglebug.com/7488
4512     ANGLE_FEATURE_CONDITION(&mFeatures, varyingsRequireMatchingPrecisionInSpirv, isPowerVR);
4513 
4514     // IMR devices are less sensitive to the src/dst stage masks in barriers, and behave more
4515     // efficiently when all barriers are aggregated, rather than individually and precisely
4516     // specified.
4517     ANGLE_FEATURE_CONDITION(&mFeatures, preferAggregateBarrierCalls, isImmediateModeRenderer);
4518 
4519     // For IMR devices, it's more efficient to ignore invalidate of framebuffer attachments with
4520     // emulated formats that have extra channels.  For TBR devices, the invalidate will be followed
4521     // by a clear to retain valid values in said extra channels.
4522     ANGLE_FEATURE_CONDITION(&mFeatures, preferSkippingInvalidateForEmulatedFormats,
4523                             isImmediateModeRenderer);
4524 
4525     // Currently disabled by default: http://anglebug.com/4324
4526     ANGLE_FEATURE_CONDITION(&mFeatures, asyncCommandQueue, false);
4527 
4528     ANGLE_FEATURE_CONDITION(&mFeatures, asyncCommandBufferReset, true);
4529 
4530     ANGLE_FEATURE_CONDITION(&mFeatures, supportsYUVSamplerConversion,
4531                             mSamplerYcbcrConversionFeatures.samplerYcbcrConversion != VK_FALSE);
4532 
4533     ANGLE_FEATURE_CONDITION(&mFeatures, supportsShaderFloat16,
4534                             mShaderFloat16Int8Features.shaderFloat16 == VK_TRUE);
4535 
4536     // Prefer driver uniforms over specialization constants in the following:
4537     //
4538     // - Older Qualcomm drivers where specialization constants severely degrade the performance of
4539     //   pipeline creation.  http://issuetracker.google.com/173636783
4540     // - ARM hardware
4541     // - Imagination hardware
4542     // - SwiftShader
4543     //
4544     ANGLE_FEATURE_CONDITION(
4545         &mFeatures, preferDriverUniformOverSpecConst,
4546         (isQualcommProprietary && qualcommDriverVersion < QualcommDriverVersion(512, 513, 0)) ||
4547             isARM || isPowerVR || isSwiftShader);
4548 
4549     // The compute shader used to generate mipmaps needs -
4550     // 1. subgroup quad operations in compute shader stage.
4551     // 2. subgroup operations that can use extended types.
4552     // 3. 256-wide workgroup.
4553     //
4554     // Furthermore, VK_IMAGE_USAGE_STORAGE_BIT is detrimental to performance on many platforms, on
4555     // which this path is not enabled.  Platforms that are known to have better performance with
4556     // this path are:
4557     //
4558     // - AMD
4559     // - Nvidia
4560     // - Samsung
4561     //
4562     // Additionally, this path is disabled on buggy drivers:
4563     //
4564     // - AMD/Windows: Unfortunately the trybots use ancient AMD cards and drivers.
4565     const bool supportsSubgroupQuadOpsInComputeShader =
4566         (mSubgroupProperties.supportedStages & VK_SHADER_STAGE_COMPUTE_BIT) &&
4567         (mSubgroupProperties.supportedOperations & VK_SUBGROUP_FEATURE_QUAD_BIT);
4568 
4569     const uint32_t maxComputeWorkGroupInvocations =
4570         mPhysicalDeviceProperties.limits.maxComputeWorkGroupInvocations;
4571 
4572     ANGLE_FEATURE_CONDITION(&mFeatures, allowGenerateMipmapWithCompute,
4573                             supportsSubgroupQuadOpsInComputeShader &&
4574                                 mSubgroupExtendedTypesFeatures.shaderSubgroupExtendedTypes &&
4575                                 maxComputeWorkGroupInvocations >= 256 &&
4576                                 ((isAMD && !IsWindows()) || isNvidia || isSamsung));
4577 
4578     bool isAdreno540 = mPhysicalDeviceProperties.deviceID == angle::kDeviceID_Adreno540;
4579     ANGLE_FEATURE_CONDITION(&mFeatures, forceMaxUniformBufferSize16KB,
4580                             isQualcommProprietary && isAdreno540);
4581 
4582     ANGLE_FEATURE_CONDITION(
4583         &mFeatures, supportsImageFormatList,
4584         ExtensionFound(VK_KHR_IMAGE_FORMAT_LIST_EXTENSION_NAME, deviceExtensionNames));
4585 
4586     ANGLE_FEATURE_CONDITION(
4587         &mFeatures, supportsSamplerMirrorClampToEdge,
4588         ExtensionFound(VK_KHR_SAMPLER_MIRROR_CLAMP_TO_EDGE_EXTENSION_NAME, deviceExtensionNames));
4589 
4590     // Emulation of GL_EXT_multisampled_render_to_texture is only really useful on tiling hardware,
4591     // but is exposed on any configuration deployed on Android, such as Samsung's AMD-based GPU.
4592     //
4593     // During testing, it was also discovered that emulation triggers bugs on some platforms:
4594     //
4595     // - Swiftshader:
4596     //   * Failure on mac: http://anglebug.com/4937
4597     //   * OOM: http://crbug.com/1263046
4598     // - Intel on windows: http://anglebug.com/5032
4599     // - AMD on windows: http://crbug.com/1132366
4600     // - Old ARM drivers on Android fail multiple tests, though newer drivers don't (although they
4601     //   support MSRTSS and emulation is unnecessary)
4602     //
4603     ANGLE_FEATURE_CONDITION(&mFeatures, allowMultisampledRenderToTextureEmulation,
4604                             (isTileBasedRenderer && !isARM) || isSamsung);
4605     ANGLE_FEATURE_CONDITION(&mFeatures, enableMultisampledRenderToTexture,
4606                             mFeatures.supportsMultisampledRenderToSingleSampled.enabled ||
4607                                 (mFeatures.supportsDepthStencilResolve.enabled &&
4608                                  mFeatures.allowMultisampledRenderToTextureEmulation.enabled));
4609 
4610     // Currently we enable cube map arrays based on the imageCubeArray Vk feature.
4611     // TODO: Check device caps for full cube map array support. http://anglebug.com/5143
4612     ANGLE_FEATURE_CONDITION(&mFeatures, supportsImageCubeArray,
4613                             mPhysicalDeviceFeatures.imageCubeArray == VK_TRUE);
4614 
4615     ANGLE_FEATURE_CONDITION(&mFeatures, supportsPipelineStatisticsQuery,
4616                             mPhysicalDeviceFeatures.pipelineStatisticsQuery == VK_TRUE);
4617 
4618     // Android mistakenly destroys the old swapchain when creating a new one.
4619     ANGLE_FEATURE_CONDITION(&mFeatures, waitIdleBeforeSwapchainRecreation, IsAndroid() && isARM);
4620 
4621     // vkCmdClearAttachments races with draw calls on Qualcomm hardware as observed on Pixel2 and
4622     // Pixel4.  https://issuetracker.google.com/issues/166809097
4623     ANGLE_FEATURE_CONDITION(
4624         &mFeatures, preferDrawClearOverVkCmdClearAttachments,
4625         isQualcommProprietary && qualcommDriverVersion < QualcommDriverVersion(512, 762, 12));
4626 
4627     // r32f image emulation is done unconditionally so VK_FORMAT_FEATURE_STORAGE_*_ATOMIC_BIT is not
4628     // required.
4629     ANGLE_FEATURE_CONDITION(&mFeatures, emulateR32fImageAtomicExchange, true);
4630 
4631     // Whether non-conformant configurations and extensions should be exposed.
4632     ANGLE_FEATURE_CONDITION(&mFeatures, exposeNonConformantExtensionsAndVersions,
4633                             kExposeNonConformantExtensionsAndVersions);
4634 
4635     ANGLE_FEATURE_CONDITION(
4636         &mFeatures, supportsMemoryBudget,
4637         ExtensionFound(VK_EXT_MEMORY_BUDGET_EXTENSION_NAME, deviceExtensionNames));
4638 
4639     // Disabled by default. Only enable it for experimental purpose, as this will cause various
4640     // tests to fail.
4641     ANGLE_FEATURE_CONDITION(&mFeatures, forceFragmentShaderPrecisionHighpToMediump, false);
4642 
    // Testing shows that on ARM GPUs, doing an implicit flush at framebuffer boundaries improves
    // performance. Most app traces show reduced frame times, and the Manhattan 3.1 offscreen score
    // improves by 7%.
4646     ANGLE_FEATURE_CONDITION(&mFeatures, preferSubmitAtFBOBoundary, isARM || isSwiftShader);
4647 
4648     // In order to support immutable samplers tied to external formats, we need to overallocate
4649     // descriptor counts for such immutable samplers
4650     ANGLE_FEATURE_CONDITION(&mFeatures, useMultipleDescriptorsForExternalFormats, true);
4651 
4652     // http://anglebug.com/6651
4653     // When creating a surface with the format GL_RGB8, override the format to be GL_RGBA8, since
4654     // Android prevents creating swapchain images with VK_FORMAT_R8G8B8_UNORM.
4655     // Do this for all platforms, since few (none?) IHVs support 24-bit formats with their HW
4656     // natively anyway.
4657     ANGLE_FEATURE_CONDITION(&mFeatures, overrideSurfaceFormatRGB8ToRGBA8, true);
4658 
4659     // We set
4660     //
4661     // - VK_PIPELINE_COLOR_BLEND_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_BIT_EXT
4662     // - VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_COLOR_ACCESS_BIT_EXT
4663     //
4664     // when this feature is supported and there is framebuffer fetch.  But the
4665     // check for framebuffer fetch is not accurate enough and those bits can
4666     // have great impact on Qualcomm (it only affects the open source driver
4667     // because the proprietary driver does not expose the extension).  Let's
4668     // disable it on Qualcomm.
4669     //
4670     // https://issuetracker.google.com/issues/255837430
4671     ANGLE_FEATURE_CONDITION(
4672         &mFeatures, supportsRasterizationOrderAttachmentAccess,
4673         !isQualcomm &&
4674             mRasterizationOrderAttachmentAccessFeatures.rasterizationOrderColorAttachmentAccess ==
4675                 VK_TRUE);
4676 
4677     // The VK_EXT_surface_maintenance1 and VK_EXT_swapchain_maintenance1 extensions are used for a
4678     // variety of improvements:
4679     //
4680     // - Recycling present semaphores
4681     // - Avoiding swapchain recreation when present modes change
4682     // - Amortizing the cost of memory allocation for swapchain creation over multiple frames
4683     //
4684     ANGLE_FEATURE_CONDITION(&mFeatures, supportsSwapchainMaintenance1,
4685                             mSwapchainMaintenance1Features.swapchainMaintenance1 == VK_TRUE &&
4686                                 useVulkanSwapchain == UseVulkanSwapchain::Yes);
4687 
4688     // The VK_EXT_legacy_dithering extension enables dithering support without emulation
4689     // Disable the usage of VK_EXT_legacy_dithering on ARM until the driver bug
4690     // http://issuetracker.google.com/293136916, http://issuetracker.google.com/292282210 are fixed.
4691     ANGLE_FEATURE_CONDITION(&mFeatures, supportsLegacyDithering,
4692                             mDitheringFeatures.legacyDithering == VK_TRUE);
4693 
4694     // Applications on Android have come to rely on hardware dithering, and visually regress without
4695     // it.  On desktop GPUs, OpenGL's dithering is a no-op.  The following setting mimics that
4696     // behavior.  Dithering is also currently not enabled on SwiftShader, but can be as needed
4697     // (which would require Chromium and Capture/Replay test expectations updates).
4698     ANGLE_FEATURE_CONDITION(&mFeatures, emulateDithering,
4699                             IsAndroid() && !mFeatures.supportsLegacyDithering.enabled);
4700 
4701     ANGLE_FEATURE_CONDITION(&mFeatures, adjustClearColorPrecision,
4702                             IsAndroid() && mFeatures.supportsLegacyDithering.enabled && isARM);
4703 
4704     // http://anglebug.com/6872
4705     // On ARM hardware, framebuffer-fetch-like behavior on Vulkan is already coherent, so we can
4706     // expose the coherent version of the GL extension despite unofficial Vulkan support.
4707     ANGLE_FEATURE_CONDITION(
4708         &mFeatures, supportsShaderFramebufferFetch,
4709         (IsAndroid() && isARM) || mFeatures.supportsRasterizationOrderAttachmentAccess.enabled);
4710 
4711     // Important games are not checking supported extensions properly, and are confusing the
4712     // GL_EXT_shader_framebuffer_fetch_non_coherent as the GL_EXT_shader_framebuffer_fetch
4713     // extension.  Therefore, don't enable the extension on Android by default.
4714     // https://issuetracker.google.com/issues/186643966
4715     // https://issuetracker.google.com/issues/340665604
4716     ANGLE_FEATURE_CONDITION(&mFeatures, supportsShaderFramebufferFetchNonCoherent, isSwiftShader);
4717 
4718     // On tile-based renderers, breaking the render pass is costly.  Changing into and out of
4719     // framebuffer fetch causes the render pass to break so that the layout of the color attachments
4720     // can be adjusted.  On such hardware, the switch to framebuffer fetch mode is made permanent so
4721     // such render pass breaks don't happen.
4722     ANGLE_FEATURE_CONDITION(&mFeatures, permanentlySwitchToFramebufferFetchMode,
4723                             isTileBasedRenderer);
4724 
4725     // Support EGL_KHR_lock_surface3 extension.
4726     ANGLE_FEATURE_CONDITION(&mFeatures, supportsLockSurfaceExtension, IsAndroid());
4727 
4728     // http://anglebug.com/6878
4729     // Android needs swapbuffers to update image and present to display.
4730     ANGLE_FEATURE_CONDITION(&mFeatures, swapbuffersOnFlushOrFinishWithSingleBuffer, IsAndroid());
4731 
4732     // Workaround a Qualcomm imprecision with dithering
4733     ANGLE_FEATURE_CONDITION(&mFeatures, roundOutputAfterDithering, isQualcomm);
4734 
4735     // GL_KHR_blend_equation_advanced is emulated when the equivalent Vulkan extension is not
4736     // usable.  Additionally, the following platforms don't support INPUT_ATTACHMENT usage for the
4737     // swapchain, so they are excluded:
4738     //
4739     // - Intel on windows
4740     // - Intel on Linux before mesa 22.0
4741     //
4742     // Without VK_GOOGLE_surfaceless_query, there is no way to automatically deduce this support.
4743     const bool isMesaAtLeast22_0_0 = mesaVersion.major >= 22;
4744     ANGLE_FEATURE_CONDITION(
4745         &mFeatures, emulateAdvancedBlendEquations,
4746         !mFeatures.supportsBlendOperationAdvanced.enabled &&
4747             (IsAndroid() || !isIntel || (isIntel && IsLinux() && isMesaAtLeast22_0_0)));
4748 
    // http://anglebug.com/6933
    // Android expects VkPresentRegionsKHR rectangles with a bottom-left origin, while the spec
    // states they should have a top-left origin.
4752     ANGLE_FEATURE_CONDITION(&mFeatures, bottomLeftOriginPresentRegionRectangles, IsAndroid());
4753 
4754     // Use VMA for image suballocation.
4755     ANGLE_FEATURE_CONDITION(&mFeatures, useVmaForImageSuballocation, true);
4756 
4757     // Emit SPIR-V 1.4 when supported.  The following old drivers have various bugs with SPIR-V 1.4:
4758     //
4759     // - Nvidia drivers - Crashes when creating pipelines, not using any SPIR-V 1.4 features.  Known
4760     //                    good since at least version 525.  http://anglebug.com/343249127
4761     // - Qualcomm drivers - Crashes when creating pipelines in the presence of OpCopyLogical with
4762     //                      some types.  http://anglebug.com/343218484
4763     // - ARM drivers - Fail tests when OpSelect uses a scalar to select between vectors.  Known good
4764     //                 since at least version 47.  http://anglebug.com/343218491
4765     ANGLE_FEATURE_CONDITION(&mFeatures, supportsSPIRV14,
4766                             ExtensionFound(VK_KHR_SPIRV_1_4_EXTENSION_NAME, deviceExtensionNames) &&
4767                                 !(isNvidia && nvidiaVersion.major < 525) &&
4768                                 !isQualcommProprietary &&
4769                                 !(isARM && armDriverVersion < ARMDriverVersion(47, 0, 0)));
4770 
4771     // Retain debug info in SPIR-V blob.
4772     ANGLE_FEATURE_CONDITION(&mFeatures, retainSPIRVDebugInfo, getEnableValidationLayers());
4773 
4774     // For discrete GPUs, most of device local memory is host invisible. We should not force the
4775     // host visible flag for them and result in allocation failure.
4776     ANGLE_FEATURE_CONDITION(
4777         &mFeatures, preferDeviceLocalMemoryHostVisible,
4778         canPreferDeviceLocalMemoryHostVisible(mPhysicalDeviceProperties.deviceType));
4779 
4780     // Multiple dynamic state issues on ARM have been fixed.
4781     // http://issuetracker.google.com/285124778
4782     // http://issuetracker.google.com/285196249
4783     // http://issuetracker.google.com/286224923
4784     // http://issuetracker.google.com/287318431
4785     //
4786     // On Pixel devices, the issues have been fixed since r44, but on others since r44p1.
4787     //
4788     // Regressions have been detected using r46 on older architectures though
4789     // http://issuetracker.google.com/336411904
4790     const bool isExtendedDynamicStateBuggy =
4791         (isARM && armDriverVersion < ARMDriverVersion(44, 1, 0)) ||
4792         (isMaliJobManagerBasedGPU && armDriverVersion >= ARMDriverVersion(46, 0, 0));
4793 
4794     // Vertex input binding stride is buggy for Windows/Intel drivers before 100.9684.
4795     const bool isVertexInputBindingStrideBuggy =
4796         IsWindows() && isIntel && intelDriverVersion < IntelDriverVersion(100, 9684);
4797 
4798     // Intel driver has issues with VK_EXT_vertex_input_dynamic_state
4799     // http://anglebug.com/7162#c8
4800     ANGLE_FEATURE_CONDITION(&mFeatures, supportsVertexInputDynamicState,
4801                             mVertexInputDynamicStateFeatures.vertexInputDynamicState == VK_TRUE &&
4802                                 !(IsWindows() && isIntel));
4803 
4804     ANGLE_FEATURE_CONDITION(&mFeatures, supportsExtendedDynamicState,
4805                             mExtendedDynamicStateFeatures.extendedDynamicState == VK_TRUE &&
4806                                 !isExtendedDynamicStateBuggy);
4807 
4808     // VK_EXT_vertex_input_dynamic_state enables dynamic state for the full vertex input state. As
4809     // such, when available use supportsVertexInputDynamicState instead of
4810     // useVertexInputBindingStrideDynamicState.
4811     ANGLE_FEATURE_CONDITION(&mFeatures, useVertexInputBindingStrideDynamicState,
4812                             mFeatures.supportsExtendedDynamicState.enabled &&
4813                                 !mFeatures.supportsVertexInputDynamicState.enabled &&
4814                                 !isExtendedDynamicStateBuggy && !isVertexInputBindingStrideBuggy);
4815     ANGLE_FEATURE_CONDITION(
4816         &mFeatures, useCullModeDynamicState,
4817         mFeatures.supportsExtendedDynamicState.enabled && !isExtendedDynamicStateBuggy);
4818     ANGLE_FEATURE_CONDITION(&mFeatures, useDepthCompareOpDynamicState,
4819                             mFeatures.supportsExtendedDynamicState.enabled);
4820     ANGLE_FEATURE_CONDITION(&mFeatures, useDepthTestEnableDynamicState,
4821                             mFeatures.supportsExtendedDynamicState.enabled);
4822     ANGLE_FEATURE_CONDITION(
4823         &mFeatures, useDepthWriteEnableDynamicState,
4824         mFeatures.supportsExtendedDynamicState.enabled && !isExtendedDynamicStateBuggy);
4825     ANGLE_FEATURE_CONDITION(&mFeatures, useFrontFaceDynamicState,
4826                             mFeatures.supportsExtendedDynamicState.enabled);
4827     ANGLE_FEATURE_CONDITION(&mFeatures, useStencilOpDynamicState,
4828                             mFeatures.supportsExtendedDynamicState.enabled);
4829     ANGLE_FEATURE_CONDITION(&mFeatures, useStencilTestEnableDynamicState,
4830                             mFeatures.supportsExtendedDynamicState.enabled);
4831 
4832     ANGLE_FEATURE_CONDITION(&mFeatures, supportsExtendedDynamicState2,
4833                             mExtendedDynamicState2Features.extendedDynamicState2 == VK_TRUE &&
4834                                 !isExtendedDynamicStateBuggy);
4835 
4836     ANGLE_FEATURE_CONDITION(
4837         &mFeatures, usePrimitiveRestartEnableDynamicState,
4838         mFeatures.supportsExtendedDynamicState2.enabled && !isExtendedDynamicStateBuggy);
4839     ANGLE_FEATURE_CONDITION(&mFeatures, useRasterizerDiscardEnableDynamicState,
4840                             mFeatures.supportsExtendedDynamicState2.enabled);
4841     ANGLE_FEATURE_CONDITION(&mFeatures, useDepthBiasEnableDynamicState,
4842                             mFeatures.supportsExtendedDynamicState2.enabled);
4843 
4844     // Disabled on Intel/Mesa due to driver bug (crbug.com/1379201).  This bug is fixed since Mesa
4845     // 22.2.0.
4846     const bool isMesaLessThan22_2 =
4847         mesaVersion.major < 22 || (mesaVersion.major == 22 && mesaVersion.minor < 2);
4848 
4849     ANGLE_FEATURE_CONDITION(
4850         &mFeatures, supportsLogicOpDynamicState,
4851         mFeatures.supportsExtendedDynamicState2.enabled &&
4852             mExtendedDynamicState2Features.extendedDynamicState2LogicOp == VK_TRUE &&
4853             !(IsLinux() && isIntel && isMesaLessThan22_2) && !(IsAndroid() && isGalaxyS23));
4854 
4855     // Samsung Vulkan driver with API level < 1.3.244 has a bug in imageless framebuffer support.
4856     const bool isSamsungDriverWithImagelessFramebufferBug =
4857         isSamsung && mPhysicalDeviceProperties.apiVersion < VK_MAKE_VERSION(1, 3, 244);
4858     ANGLE_FEATURE_CONDITION(&mFeatures, supportsImagelessFramebuffer,
4859                             mImagelessFramebufferFeatures.imagelessFramebuffer == VK_TRUE &&
4860                                 !isSamsungDriverWithImagelessFramebufferBug);
4861 
4862     if (ExtensionFound(VK_KHR_FRAGMENT_SHADING_RATE_EXTENSION_NAME, deviceExtensionNames))
4863     {
4864         queryAndCacheFragmentShadingRates();
4865     }
4866 
4867     // Support GL_QCOM_shading_rate extension
4868     ANGLE_FEATURE_CONDITION(&mFeatures, supportsFragmentShadingRate,
4869                             canSupportFragmentShadingRate());
4870 
4871     // Support QCOM foveated rendering extensions.
4872     // Gated on supportsImagelessFramebuffer and supportsRenderPassLoadStoreOpNone
4873     // to reduce code complexity.
4874     ANGLE_FEATURE_CONDITION(&mFeatures, supportsFoveatedRendering,
4875                             mFeatures.supportsImagelessFramebuffer.enabled &&
4876                                 mFeatures.supportsRenderPassLoadStoreOpNone.enabled &&
4877                                 mFeatures.supportsFragmentShadingRate.enabled &&
4878                                 canSupportFoveatedRendering());
4879 
4880     // We can use the interlock to support GL_ANGLE_shader_pixel_local_storage_coherent.
4881     ANGLE_FEATURE_CONDITION(
4882         &mFeatures, supportsFragmentShaderPixelInterlock,
4883         mFragmentShaderInterlockFeatures.fragmentShaderPixelInterlock == VK_TRUE);
4884 
4885     // The VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_ROBUST_BUFFER_ACCESS_EXT behavior is used by
4886     // ANGLE, which requires the robustBufferAccess feature to be available.
4887     ANGLE_FEATURE_CONDITION(&mFeatures, supportsPipelineRobustness,
4888                             mPipelineRobustnessFeatures.pipelineRobustness == VK_TRUE &&
4889                                 mPhysicalDeviceFeatures.robustBufferAccess);
4890 
4891     ANGLE_FEATURE_CONDITION(&mFeatures, supportsPipelineProtectedAccess,
4892                             mPipelineProtectedAccessFeatures.pipelineProtectedAccess == VK_TRUE &&
4893                                 mProtectedMemoryFeatures.protectedMemory == VK_TRUE);
4894 
4895     // VK_EXT_graphics_pipeline_library is available on NVIDIA drivers earlier
4896     // than version 531, but there are transient visual glitches with rendering
4897     // on those earlier versions.  http://anglebug.com/8218
4898     //
4899     // On RADV, creating graphics pipeline can crash in the driver.  http://crbug.com/1497512
4900     ANGLE_FEATURE_CONDITION(&mFeatures, supportsGraphicsPipelineLibrary,
4901                             mGraphicsPipelineLibraryFeatures.graphicsPipelineLibrary == VK_TRUE &&
4902                                 (!isNvidia || nvidiaVersion.major >= 531) && !isRADV);
4903 
4904     // The following drivers are known to key the pipeline cache blobs with vertex input and
4905     // fragment output state, causing draw-time pipeline creation to miss the cache regardless of
4906     // warmup:
4907     //
4908     // - ARM drivers
4909     // - Imagination drivers
4910     //
4911     // The following drivers are instead known to _not_ include said state, and hit the cache at
4912     // draw time.
4913     //
4914     // - SwiftShader
4915     // - Open source Qualcomm drivers
4916     //
4917     // The situation is unknown for other drivers.
4918     //
4919     // Additionally, numerous tests that previously never created a Vulkan pipeline fail or crash on
4920     // proprietary Qualcomm drivers when they do during cache warm up.  On Intel/Linux, one trace
4921     // shows flakiness with this.
4922     const bool libraryBlobsAreReusedByMonolithicPipelines = !isARM && !isPowerVR;
4923     ANGLE_FEATURE_CONDITION(&mFeatures, warmUpPipelineCacheAtLink,
4924                             libraryBlobsAreReusedByMonolithicPipelines && !isQualcommProprietary &&
4925                                 !(IsLinux() && isIntel) && !(IsChromeOS() && isSwiftShader));
4926 
4927     // On SwiftShader, no data is retrieved from the pipeline cache, so there is no reason to
4928     // serialize it or put it in the blob cache.
    // For Windows Nvidia Vulkan drivers older than 520, the Vulkan pipeline cache will only
    // generate one single cache per process, shared by all graphics pipelines in that process,
    // which can be huge.
4932     const bool nvVersionLessThan520 = isNvidia && (nvidiaVersion.major < 520u);
4933     ANGLE_FEATURE_CONDITION(&mFeatures, hasEffectivePipelineCacheSerialization,
4934                             !isSwiftShader && !nvVersionLessThan520);
4935 
4936     // When the driver sets graphicsPipelineLibraryFastLinking, it means that monolithic pipelines
4937     // are just a bundle of the libraries, and that there is no benefit in creating monolithic
4938     // pipelines.
4939     //
4940     // Note: for testing purposes, this is enabled on SwiftShader despite the fact that it doesn't
4941     // need it.  This should be undone once there is at least one bot that supports
4942     // VK_EXT_graphics_pipeline_library without graphicsPipelineLibraryFastLinking
4943     ANGLE_FEATURE_CONDITION(
4944         &mFeatures, preferMonolithicPipelinesOverLibraries,
4945         !mGraphicsPipelineLibraryProperties.graphicsPipelineLibraryFastLinking || isSwiftShader);
4946 
4947     // Whether the pipeline caches should merge into the global pipeline cache.  This should only be
4948     // enabled on platforms if:
4949     //
4950     // - VK_EXT_graphics_pipeline_library is not supported.  In that case, only the program's cache
4951     //   used during warm up is merged into the global cache for later monolithic pipeline creation.
4952     // - VK_EXT_graphics_pipeline_library is supported, monolithic pipelines are preferred, and the
4953     //   driver is able to reuse blobs from partial pipelines when creating monolithic pipelines.
4954     ANGLE_FEATURE_CONDITION(&mFeatures, mergeProgramPipelineCachesToGlobalCache,
4955                             !mFeatures.supportsGraphicsPipelineLibrary.enabled ||
4956                                 (mFeatures.preferMonolithicPipelinesOverLibraries.enabled &&
4957                                  libraryBlobsAreReusedByMonolithicPipelines));
4958 
4959     ANGLE_FEATURE_CONDITION(&mFeatures, enableAsyncPipelineCacheCompression, true);
4960 
4961     // Sync monolithic pipelines to the blob cache occasionally on platforms that would benefit from
4962     // it:
4963     //
4964     // - VK_EXT_graphics_pipeline_library is not supported, and the program cache is not warmed up:
4965     //   If the pipeline cache is being warmed up at link time, the blobs corresponding to each
4966     //   program is individually retrieved and stored in the blob cache already.
    // - VK_EXT_graphics_pipeline_library is supported, but monolithic pipelines are still preferred,
4968     //   and the cost of syncing the large cache is acceptable.
4969     //
4970     // Otherwise monolithic pipelines are recreated on every run.
4971     const bool hasNoPipelineWarmUp = !mFeatures.supportsGraphicsPipelineLibrary.enabled &&
4972                                      !mFeatures.warmUpPipelineCacheAtLink.enabled;
4973     const bool canSyncLargeMonolithicCache =
4974         mFeatures.supportsGraphicsPipelineLibrary.enabled &&
4975         mFeatures.preferMonolithicPipelinesOverLibraries.enabled &&
4976         (!IsAndroid() || mFeatures.enableAsyncPipelineCacheCompression.enabled);
4977     ANGLE_FEATURE_CONDITION(&mFeatures, syncMonolithicPipelinesToBlobCache,
4978                             mFeatures.hasEffectivePipelineCacheSerialization.enabled &&
4979                                 (hasNoPipelineWarmUp || canSyncLargeMonolithicCache));
4980 
4981     // On ARM, dynamic state for stencil write mask doesn't work correctly in the presence of
4982     // discard or alpha to coverage, if the static state provided when creating the pipeline has a
4983     // value of 0.
4984     ANGLE_FEATURE_CONDITION(&mFeatures, useNonZeroStencilWriteMaskStaticState,
4985                             isARM && armDriverVersion < ARMDriverVersion(43, 0, 0));
4986 
4987     // On ARM, per-sample shading is not enabled despite the presence of a Sample decoration.  As a
4988     // workaround, per-sample shading is inferred by ANGLE and explicitly enabled by the API.
4989     ANGLE_FEATURE_CONDITION(&mFeatures, explicitlyEnablePerSampleShading, isARM);
4990 
4991     ANGLE_FEATURE_CONDITION(&mFeatures, explicitlyCastMediumpFloatTo16Bit, isARM);
4992 
4993     // Force to create swapchain with continuous refresh on shared present. Disabled by default.
4994     // Only enable it on integrations without EGL_FRONT_BUFFER_AUTO_REFRESH_ANDROID passthrough.
4995     ANGLE_FEATURE_CONDITION(&mFeatures, forceContinuousRefreshOnSharedPresent, false);
4996 
4997     // Enable setting frame timestamp surface attribute on Android platform.
4998     // Frame timestamp is enabled by calling into "vkGetPastPresentationTimingGOOGLE"
4999     // which, on Android platforms, makes the necessary ANativeWindow API calls.
5000     ANGLE_FEATURE_CONDITION(&mFeatures, supportsTimestampSurfaceAttribute,
5001                             IsAndroid() && ExtensionFound(VK_GOOGLE_DISPLAY_TIMING_EXTENSION_NAME,
5002                                                           deviceExtensionNames));
5003 
5004     // Only enable VK_EXT_host_image_copy on hardware where identicalMemoryTypeRequirements is set.
5005     // That lets ANGLE avoid having to fallback to non-host-copyable image allocations if the
5006     // host-copyable one fails due to out-of-that-specific-kind-of-memory.
5007     //
5008     // Disabled on Fuchsia until they upgrade their version of VVL.
5009     ANGLE_FEATURE_CONDITION(&mFeatures, supportsHostImageCopy,
5010                             mHostImageCopyFeatures.hostImageCopy == VK_TRUE &&
5011                                 mHostImageCopyProperties.identicalMemoryTypeRequirements &&
5012                                 !IsFuchsia());
5013 
5014     // 1) host vk driver does not natively support ETC format.
5015     // 2) host vk driver supports BC format.
5016     // 3) host vk driver supports subgroup instructions: clustered, shuffle.
5017     //    * This limitation can be removed if necessary.
5018     // 4) host vk driver has maxTexelBufferSize >= 64M.
    //    * Usually on desktop devices the limit is more than 128M. We may switch to dynamically
    //    deciding between CPU or GPU texture upload based on texture size.
5021     constexpr VkSubgroupFeatureFlags kRequiredSubgroupOp =
5022         VK_SUBGROUP_FEATURE_SHUFFLE_BIT | VK_SUBGROUP_FEATURE_CLUSTERED_BIT;
5023     static constexpr bool kSupportTranscodeEtcToBc = false;
5024     static constexpr uint32_t kMaxTexelBufferSize  = 64 * 1024 * 1024;
5025     const VkPhysicalDeviceLimits &limitsVk         = mPhysicalDeviceProperties.limits;
5026     ANGLE_FEATURE_CONDITION(&mFeatures, supportsComputeTranscodeEtcToBc,
5027                             !mPhysicalDeviceFeatures.textureCompressionETC2 &&
5028                                 kSupportTranscodeEtcToBc &&
5029                                 (mSubgroupProperties.supportedOperations & kRequiredSubgroupOp) ==
5030                                     kRequiredSubgroupOp &&
5031                                 (limitsVk.maxTexelBufferElements >= kMaxTexelBufferSize));
5032 
5033     // Limit GL_MAX_SHADER_STORAGE_BLOCK_SIZE to 256MB on older ARM hardware.
5034     ANGLE_FEATURE_CONDITION(&mFeatures, limitMaxStorageBufferSize, isMaliJobManagerBasedGPU);
5035 
5036     // http://anglebug.com/7308
5037     // Flushing mutable textures causes flakes in perf tests using Windows/Intel GPU. Failures are
5038     // due to lost context/device.
5039     // http://b/278600575
5040     // Flushing mutable texture is disabled for discrete GPUs to mitigate possible VRAM OOM.
5041     ANGLE_FEATURE_CONDITION(
5042         &mFeatures, mutableMipmapTextureUpload,
5043         canPreferDeviceLocalMemoryHostVisible(mPhysicalDeviceProperties.deviceType));
5044 
5045     // Allow passthrough of EGL colorspace attributes on Android platform and for vendors that
5046     // are known to support wide color gamut.
5047     ANGLE_FEATURE_CONDITION(&mFeatures, eglColorspaceAttributePassthrough,
5048                             IsAndroid() && isSamsung);
5049 
5050     // GBM does not have a VkSurface hence it does not support presentation through a Vulkan queue.
5051     ANGLE_FEATURE_CONDITION(&mFeatures, supportsPresentation,
5052                             nativeWindowSystem != angle::NativeWindowSystem::Gbm);
5053 
5054     // For tiled renderer, the renderpass query result may not available until the entire renderpass
5055     // is completed. This may cause a bubble in the application thread waiting result to be
5056     // available. When this feature flag is enabled, we will issue an immediate flush when we detect
5057     // there is switch from query enabled draw to query disabled draw. Since most apps uses bunch of
5058     // query back to back, this should only introduce one extra flush per frame.
5059     // https://issuetracker.google.com/250706693
5060     ANGLE_FEATURE_CONDITION(&mFeatures, preferSubmitOnAnySamplesPassedQueryEnd,
5061                             isTileBasedRenderer);
5062 
5063     // ARM driver appears having a bug that if we did not wait for submission to complete, but call
5064     // vkGetQueryPoolResults(VK_QUERY_RESULT_WAIT_BIT), it may result VK_NOT_READY.
5065     // https://issuetracker.google.com/253522366
5066     //
5067     // Workaround for nvidia earlier version driver which appears having a bug that On older nvidia
5068     // driver, vkGetQueryPoolResult() with VK_QUERY_RESULT_WAIT_BIT may result in incorrect result.
5069     // In that case we force into CPU wait for submission to complete. http://anglebug.com/6692
5070     ANGLE_FEATURE_CONDITION(&mFeatures, forceWaitForSubmissionToCompleteForQueryResult,
5071                             isARM || (isNvidia && nvidiaVersion.major < 470u));
5072 
5073     // Some ARM drivers may not free memory in "vkFreeCommandBuffers()" without
5074     // VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT flag.
5075     ANGLE_FEATURE_CONDITION(&mFeatures, useResetCommandBufferBitForSecondaryPools, isARM);
5076 
5077     // Intel and AMD mesa drivers need depthBiasConstantFactor to be doubled to align with GL.
5078     ANGLE_FEATURE_CONDITION(&mFeatures, doubleDepthBiasConstantFactor,
5079                             (isIntel && !IsWindows()) || isRADV || isNvidia);
5080 
5081     // Required to pass android.media.codec.cts.EncodeDecodeTest
5082     // https://issuetracker.google.com/246218584
5083     ANGLE_FEATURE_CONDITION(
5084         &mFeatures, mapUnspecifiedColorSpaceToPassThrough,
5085         ExtensionFound(VK_EXT_SWAPCHAIN_COLOR_SPACE_EXTENSION_NAME, mEnabledInstanceExtensions));
5086 
5087     ANGLE_FEATURE_CONDITION(&mFeatures, enablePipelineCacheDataCompression, true);
5088 
5089     ANGLE_FEATURE_CONDITION(&mFeatures, supportsTimelineSemaphore,
5090                             mTimelineSemaphoreFeatures.timelineSemaphore == VK_TRUE);
5091 
5092     // 8bit storage features
5093     ANGLE_FEATURE_CONDITION(&mFeatures, supports8BitStorageBuffer,
5094                             m8BitStorageFeatures.storageBuffer8BitAccess == VK_TRUE);
5095 
5096     ANGLE_FEATURE_CONDITION(&mFeatures, supports8BitUniformAndStorageBuffer,
5097                             m8BitStorageFeatures.uniformAndStorageBuffer8BitAccess == VK_TRUE);
5098 
5099     ANGLE_FEATURE_CONDITION(&mFeatures, supports8BitPushConstant,
5100                             m8BitStorageFeatures.storagePushConstant8 == VK_TRUE);
5101 
5102     // 16bit storage features
5103     ANGLE_FEATURE_CONDITION(&mFeatures, supports16BitStorageBuffer,
5104                             m16BitStorageFeatures.storageBuffer16BitAccess == VK_TRUE);
5105 
5106     ANGLE_FEATURE_CONDITION(&mFeatures, supports16BitUniformAndStorageBuffer,
5107                             m16BitStorageFeatures.uniformAndStorageBuffer16BitAccess == VK_TRUE);
5108 
5109     ANGLE_FEATURE_CONDITION(&mFeatures, supports16BitPushConstant,
5110                             m16BitStorageFeatures.storagePushConstant16 == VK_TRUE);
5111 
5112     ANGLE_FEATURE_CONDITION(&mFeatures, supports16BitInputOutput,
5113                             m16BitStorageFeatures.storageInputOutput16 == VK_TRUE);
5114 
5115 #if defined(ANGLE_PLATFORM_ANDROID)
5116     ANGLE_FEATURE_CONDITION(&mFeatures, supportsExternalFormatResolve,
5117                             mExternalFormatResolveFeatures.externalFormatResolve == VK_TRUE);
5118 #else
5119     ANGLE_FEATURE_CONDITION(&mFeatures, supportsExternalFormatResolve, false);
5120 #endif
5121 
5122     ANGLE_FEATURE_CONDITION(&mFeatures, useVkEventForImageBarrier, false);
5123 
5124     ANGLE_FEATURE_CONDITION(&mFeatures, supportsDynamicRendering,
5125                             mDynamicRenderingFeatures.dynamicRendering == VK_TRUE);
5126     ANGLE_FEATURE_CONDITION(
5127         &mFeatures, supportsDynamicRenderingLocalRead,
5128         mDynamicRenderingLocalReadFeatures.dynamicRenderingLocalRead == VK_TRUE);
5129 
5130     // Dynamic rendering usage is not yet implemented.
5131     ANGLE_FEATURE_CONDITION(&mFeatures, preferDynamicRendering, false);
5132 
5133     // Disable memory report feature overrides if extension is not supported.
5134     if ((mFeatures.logMemoryReportCallbacks.enabled || mFeatures.logMemoryReportStats.enabled) &&
5135         !mMemoryReportFeatures.deviceMemoryReport)
5136     {
5137         WARN() << "Disabling the following feature(s) because driver does not support "
5138                   "VK_EXT_device_memory_report extension:";
5139         if (getFeatures().logMemoryReportStats.enabled)
5140         {
5141             WARN() << "\tlogMemoryReportStats";
5142             mFeatures.logMemoryReportStats.applyOverride(false);
5143         }
5144         if (getFeatures().logMemoryReportCallbacks.enabled)
5145         {
5146             WARN() << "\tlogMemoryReportCallbacks";
5147             mFeatures.logMemoryReportCallbacks.applyOverride(false);
5148         }
5149     }
5150 
5151     // Check if VK implementation needs to strip-out non-semantic reflection info from shader module
5152     // (Default is to assume not supported)
5153     ANGLE_FEATURE_CONDITION(&mFeatures, supportsShaderNonSemanticInfo, false);
5154 }
5155 
// Hook for per-application feature overrides.  Intentionally a no-op in the Vulkan backend.
void Renderer::appBasedFeatureOverrides(const vk::ExtensionNameList &extensions) {}
5157 
initPipelineCache(vk::Context * context,vk::PipelineCache * pipelineCache,bool * success)5158 angle::Result Renderer::initPipelineCache(vk::Context *context,
5159                                           vk::PipelineCache *pipelineCache,
5160                                           bool *success)
5161 {
5162     angle::MemoryBuffer initialData;
5163     if (!mFeatures.disablePipelineCacheLoadForTesting.enabled)
5164     {
5165         ANGLE_TRY(GetAndDecompressPipelineCacheVk(mPhysicalDeviceProperties, context, mGlobalOps,
5166                                                   &initialData, success));
5167     }
5168 
5169     VkPipelineCacheCreateInfo pipelineCacheCreateInfo = {};
5170 
5171     pipelineCacheCreateInfo.sType           = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO;
5172     pipelineCacheCreateInfo.flags           = 0;
5173     pipelineCacheCreateInfo.initialDataSize = *success ? initialData.size() : 0;
5174     pipelineCacheCreateInfo.pInitialData    = *success ? initialData.data() : nullptr;
5175 
5176     ANGLE_VK_TRY(context, pipelineCache->init(mDevice, pipelineCacheCreateInfo));
5177 
5178     return angle::Result::Continue;
5179 }
5180 
ensurePipelineCacheInitialized(vk::Context * context)5181 angle::Result Renderer::ensurePipelineCacheInitialized(vk::Context *context)
5182 {
5183     // If it is initialized already, there is nothing to do
5184     if (mPipelineCacheInitialized)
5185     {
5186         return angle::Result::Continue;
5187     }
5188 
5189     std::unique_lock<angle::SimpleMutex> lock(mPipelineCacheMutex);
5190 
5191     // If another thread initialized it first don't redo it
5192     if (mPipelineCacheInitialized)
5193     {
5194         return angle::Result::Continue;
5195     }
5196 
5197     // We should now create the pipeline cache with the blob cache pipeline data.
5198     bool loadedFromBlobCache = false;
5199     ANGLE_TRY(initPipelineCache(context, &mPipelineCache, &loadedFromBlobCache));
5200     if (loadedFromBlobCache)
5201     {
5202         ANGLE_TRY(getPipelineCacheSize(context, &mPipelineCacheSizeAtLastSync));
5203     }
5204 
5205     mPipelineCacheInitialized = true;
5206 
5207     return angle::Result::Continue;
5208 }
5209 
getPipelineCache(vk::Context * context,vk::PipelineCacheAccess * pipelineCacheOut)5210 angle::Result Renderer::getPipelineCache(vk::Context *context,
5211                                          vk::PipelineCacheAccess *pipelineCacheOut)
5212 {
5213     ANGLE_TRY(ensurePipelineCacheInitialized(context));
5214 
5215     angle::SimpleMutex *pipelineCacheMutex =
5216         (context->getFeatures().mergeProgramPipelineCachesToGlobalCache.enabled)
5217             ? &mPipelineCacheMutex
5218             : nullptr;
5219 
5220     pipelineCacheOut->init(&mPipelineCache, pipelineCacheMutex);
5221     return angle::Result::Continue;
5222 }
5223 
mergeIntoPipelineCache(vk::Context * context,const vk::PipelineCache & pipelineCache)5224 angle::Result Renderer::mergeIntoPipelineCache(vk::Context *context,
5225                                                const vk::PipelineCache &pipelineCache)
5226 {
5227     // It is an error to call into this method when the feature is disabled.
5228     ASSERT(context->getFeatures().mergeProgramPipelineCachesToGlobalCache.enabled);
5229 
5230     vk::PipelineCacheAccess globalCache;
5231     ANGLE_TRY(getPipelineCache(context, &globalCache));
5232 
5233     globalCache.merge(this, pipelineCache);
5234 
5235     return angle::Result::Continue;
5236 }
5237 
// Returns the native GL caps, lazily computing all caps state on first use.
const gl::Caps &Renderer::getNativeCaps() const
{
    ensureCapsInitialized();
    return mNativeCaps;
}
5243 
// Returns the per-format texture caps, lazily computing all caps state on first use.
const gl::TextureCapsMap &Renderer::getNativeTextureCaps() const
{
    ensureCapsInitialized();
    return mNativeTextureCaps;
}
5249 
// Returns the supported GL extensions, lazily computing all caps state on first use.
const gl::Extensions &Renderer::getNativeExtensions() const
{
    ensureCapsInitialized();
    return mNativeExtensions;
}
5255 
// Returns the implementation limitations, lazily computing all caps state on first use.
const gl::Limitations &Renderer::getNativeLimitations() const
{
    ensureCapsInitialized();
    return mNativeLimitations;
}
5261 
// Returns the shader-compiler pixel-local-storage options.  Unlike the getters above, this
// does not require caps initialization.
const ShPixelLocalStorageOptions &Renderer::getNativePixelLocalStorageOptions() const
{
    return mNativePLSOptions;
}
5266 
// Configures frontend (API-layer) feature toggles based on the Vulkan device in use.
// Const: only reads device properties; the toggles are written into |features|.
void Renderer::initializeFrontendFeatures(angle::FrontendFeatures *features) const
{
    const bool isSwiftShader =
        IsSwiftshader(mPhysicalDeviceProperties.vendorID, mPhysicalDeviceProperties.deviceID);

    // Hopefully-temporary work-around for a crash on SwiftShader.  An Android process is turning
    // off GL error checking, and then asking ANGLE to write past the end of a buffer.
    // https://issuetracker.google.com/issues/220069903
    ANGLE_FEATURE_CONDITION(features, forceGlErrorChecking, (IsAndroid() && isSwiftShader));

    ANGLE_FEATURE_CONDITION(features, cacheCompiledShader, true);

    // https://issuetracker.google.com/292285899
    ANGLE_FEATURE_CONDITION(features, uncurrentEglSurfaceUponSurfaceDestroy, true);

    // The Vulkan backend's handling of compile and link is thread-safe
    ANGLE_FEATURE_CONDITION(features, compileJobIsThreadSafe, true);
    ANGLE_FEATURE_CONDITION(features, linkJobIsThreadSafe, true);
    // Always run the link's warm up job in a thread.  It's an optimization only, and does not block
    // the link resolution.
    ANGLE_FEATURE_CONDITION(features, alwaysRunLinkSubJobsThreaded, true);
}
5289 
// Queries the current size in bytes of the pipeline cache data, without copying the data.
angle::Result Renderer::getPipelineCacheSize(vk::Context *context, size_t *pipelineCacheSizeOut)
{
    // Passing nullptr for the data pointer makes vkGetPipelineCacheData report only the size.
    ANGLE_VK_TRY(context, mPipelineCache.getCacheData(mDevice, pipelineCacheSizeOut, nullptr));
    return angle::Result::Continue;
}
5295 
// Periodically stores the (possibly compressed) pipeline cache data into the blob cache.
// Throttled two ways: only every kPipelineCacheVkUpdatePeriod calls, and only when the cache
// has grown since the last sync.  May offload compression to a worker thread.
angle::Result Renderer::syncPipelineCacheVk(vk::Context *context,
                                            vk::GlobalOps *globalOps,
                                            const gl::Context *contextGL)
{
    ASSERT(mPipelineCache.valid());

    if (!mFeatures.syncMonolithicPipelinesToBlobCache.enabled)
    {
        return angle::Result::Continue;
    }

    // Countdown throttle: do nothing until the period elapses.
    if (--mPipelineCacheVkUpdateTimeout > 0)
    {
        return angle::Result::Continue;
    }

    mPipelineCacheVkUpdateTimeout = kPipelineCacheVkUpdatePeriod;

    // Skip the store if the cache hasn't grown since the last sync.
    size_t pipelineCacheSize = 0;
    ANGLE_TRY(getPipelineCacheSize(context, &pipelineCacheSize));
    if (pipelineCacheSize <= mPipelineCacheSizeAtLastSync)
    {
        return angle::Result::Continue;
    }
    mPipelineCacheSizeAtLastSync = pipelineCacheSize;

    // Make sure we will receive enough data to hold the pipeline cache header
    // Table 7. Layout for pipeline cache header version VK_PIPELINE_CACHE_HEADER_VERSION_ONE
    const size_t kPipelineCacheHeaderSize = 16 + VK_UUID_SIZE;
    if (pipelineCacheSize < kPipelineCacheHeaderSize)
    {
        // No pipeline cache data to read, so return
        return angle::Result::Continue;
    }

    ContextVk *contextVk = vk::GetImpl(contextGL);

    // Use worker thread pool to complete compression.
    // If the last task hasn't been finished, skip the syncing.
    if (mCompressEvent && !mCompressEvent->isReady())
    {
        ANGLE_PERF_WARNING(contextVk->getDebug(), GL_DEBUG_SEVERITY_LOW,
                           "Skip syncing pipeline cache data when the last task is not ready.");
        return angle::Result::Continue;
    }

    std::vector<uint8_t> pipelineCacheData(pipelineCacheSize);

    // pipelineCacheSize is an in/out parameter below; remember the requested size for logging.
    size_t oldPipelineCacheSize = pipelineCacheSize;
    VkResult result =
        mPipelineCache.getCacheData(mDevice, &pipelineCacheSize, pipelineCacheData.data());
    // We don't need all of the cache data, so just make sure we at least got the header
    // Vulkan Spec 9.6. Pipeline Cache
    // https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/chap9.html#pipelines-cache
    // If pDataSize is less than what is necessary to store this header, nothing will be written to
    // pData and zero will be written to pDataSize.
    // Any data written to pData is valid and can be provided as the pInitialData member of the
    // VkPipelineCacheCreateInfo structure passed to vkCreatePipelineCache.
    if (ANGLE_UNLIKELY(pipelineCacheSize < kPipelineCacheHeaderSize))
    {
        WARN() << "Not enough pipeline cache data read.";
        return angle::Result::Continue;
    }
    else if (ANGLE_UNLIKELY(result == VK_INCOMPLETE))
    {
        // Partial data is still usable per the spec excerpt above; warn but keep going.
        WARN() << "Received VK_INCOMPLETE: Old: " << oldPipelineCacheSize
               << ", New: " << pipelineCacheSize;
    }
    else
    {
        ANGLE_VK_TRY(context, result);
    }

    // If vkGetPipelineCacheData ends up writing fewer bytes than requested, shrink the buffer to
    // avoid leaking garbage memory and potential rejection of the data by subsequent
    // vkCreatePipelineCache call.  Some drivers may ignore entire buffer if there padding present.
    ASSERT(pipelineCacheSize <= pipelineCacheData.size());
    pipelineCacheData.resize(pipelineCacheSize);

    if (mFeatures.enableAsyncPipelineCacheCompression.enabled)
    {
        // zlib compression ratio normally ranges from 2:1 to 5:1. Set kMaxTotalSize to 64M to
        // ensure the size can fit into the 32MB blob cache limit on supported platforms.
        constexpr size_t kMaxTotalSize = 64 * 1024 * 1024;

        // Create task to compress.
        mCompressEvent = contextGL->getWorkerThreadPool()->postWorkerTask(
            std::make_shared<CompressAndStorePipelineCacheTask>(
                globalOps, contextVk, std::move(pipelineCacheData), kMaxTotalSize));
    }
    else
    {
        // If enableAsyncPipelineCacheCompression is disabled, to avoid the risk, set kMaxTotalSize
        // to 64k.
        constexpr size_t kMaxTotalSize = 64 * 1024;
        CompressAndStorePipelineCacheVk(mPhysicalDeviceProperties, globalOps, contextVk,
                                        pipelineCacheData, kMaxTotalSize);
    }

    return angle::Result::Continue;
}
5397 
5398 // These functions look at the mandatory format for support, and fallback to querying the device (if
5399 // necessary) to test the availability of the bits.
// True iff all of |featureBits| are supported for the format with linear tiling.
bool Renderer::hasLinearImageFormatFeatureBits(angle::FormatID formatID,
                                               const VkFormatFeatureFlags featureBits) const
{
    return hasFormatFeatureBits<&VkFormatProperties::linearTilingFeatures>(formatID, featureBits);
}
5405 
// Returns the subset of |featureBits| supported for the format with linear tiling.
VkFormatFeatureFlags Renderer::getLinearImageFormatFeatureBits(
    angle::FormatID formatID,
    const VkFormatFeatureFlags featureBits) const
{
    return getFormatFeatureBits<&VkFormatProperties::linearTilingFeatures>(formatID, featureBits);
}
5412 
// Returns the subset of |featureBits| supported for the format with optimal tiling.
VkFormatFeatureFlags Renderer::getImageFormatFeatureBits(
    angle::FormatID formatID,
    const VkFormatFeatureFlags featureBits) const
{
    return getFormatFeatureBits<&VkFormatProperties::optimalTilingFeatures>(formatID, featureBits);
}
5419 
// True iff all of |featureBits| are supported for the format with optimal tiling.
bool Renderer::hasImageFormatFeatureBits(angle::FormatID formatID,
                                         const VkFormatFeatureFlags featureBits) const
{
    return hasFormatFeatureBits<&VkFormatProperties::optimalTilingFeatures>(formatID, featureBits);
}
5425 
// True iff all of |featureBits| are supported for the format when used in buffers.
bool Renderer::hasBufferFormatFeatureBits(angle::FormatID formatID,
                                          const VkFormatFeatureFlags featureBits) const
{
    return hasFormatFeatureBits<&VkFormatProperties::bufferFeatures>(formatID, featureBits);
}
5431 
outputVmaStatString()5432 void Renderer::outputVmaStatString()
5433 {
5434     // Output the VMA stats string
5435     // This JSON string can be passed to VmaDumpVis.py to generate a visualization of the
5436     // allocations the VMA has performed.
5437     char *statsString;
5438     mAllocator.buildStatsString(&statsString, true);
5439     INFO() << std::endl << statsString << std::endl;
5440     mAllocator.freeStatsString(statsString);
5441 }
5442 
// Submits a single primary command buffer outside the normal per-context command stream,
// optionally waiting on |waitSemaphore|.  A temporary queue-serial index is allocated for
// the submission and released when |index| goes out of scope; the generated serial is
// returned through |queueSerialOut| so callers can wait on completion.
angle::Result Renderer::queueSubmitOneOff(vk::Context *context,
                                          vk::PrimaryCommandBuffer &&primary,
                                          vk::ProtectionType protectionType,
                                          egl::ContextPriority priority,
                                          VkSemaphore waitSemaphore,
                                          VkPipelineStageFlags waitSemaphoreStageMasks,
                                          vk::SubmitPolicy submitPolicy,
                                          QueueSerial *queueSerialOut)
{
    ANGLE_TRACE_EVENT0("gpu.angle", "Renderer::queueSubmitOneOff");
    // Allocate a one off SerialIndex and generate a QueueSerial and then use it and release the
    // index.
    vk::ScopedQueueSerialIndex index;
    ANGLE_TRY(allocateScopedQueueSerialIndex(&index));
    QueueSerial submitQueueSerial(index.get(), generateQueueSerial(index.get()));

    // Route through the async command processor when enabled; otherwise submit directly.
    if (isAsyncCommandQueueEnabled())
    {
        ANGLE_TRY(mCommandProcessor.enqueueSubmitOneOffCommands(
            context, protectionType, priority, primary.getHandle(), waitSemaphore,
            waitSemaphoreStageMasks, submitPolicy, submitQueueSerial));
    }
    else
    {
        ANGLE_TRY(mCommandQueue.queueSubmitOneOff(
            context, protectionType, priority, primary.getHandle(), waitSemaphore,
            waitSemaphoreStageMasks, submitPolicy, submitQueueSerial));
    }

    *queueSerialOut = submitQueueSerial;
    // A null |primary| is allowed (semaphore-only submits); only recycle a valid buffer.
    if (primary.valid())
    {
        mOneOffCommandPoolMap[protectionType].releaseCommandBuffer(submitQueueSerial,
                                                                   std::move(primary));
    }

    ANGLE_TRY(mCommandQueue.postSubmitCheck(context));

    return angle::Result::Continue;
}
5483 
// Submits an empty batch whose only purpose is to wait on |waitSemaphore|, tagged with
// |submitQueueSerial|.  Uses a null command buffer handle; routed through the async command
// processor when enabled.
angle::Result Renderer::queueSubmitWaitSemaphore(vk::Context *context,
                                                 egl::ContextPriority priority,
                                                 const vk::Semaphore &waitSemaphore,
                                                 VkPipelineStageFlags waitSemaphoreStageMasks,
                                                 QueueSerial submitQueueSerial)
{
    if (isAsyncCommandQueueEnabled())
    {
        ANGLE_TRY(mCommandProcessor.enqueueSubmitOneOffCommands(
            context, vk::ProtectionType::Unprotected, priority, VK_NULL_HANDLE,
            waitSemaphore.getHandle(), waitSemaphoreStageMasks, vk::SubmitPolicy::AllowDeferred,
            submitQueueSerial));
    }
    else
    {
        ANGLE_TRY(mCommandQueue.queueSubmitOneOff(
            context, vk::ProtectionType::Unprotected, priority, VK_NULL_HANDLE,
            waitSemaphore.getHandle(), waitSemaphoreStageMasks, vk::SubmitPolicy::AllowDeferred,
            submitQueueSerial));
    }

    return angle::Result::Continue;
}
5507 
// Returns the subset of |featureBits| supported for |formatID|, for the tiling/buffer
// feature field selected by the |features| template parameter.  Results are cached in
// mFormatProperties; bufferFeatures == kInvalidFormatFeatureFlags marks an entry that has
// not been queried from the device yet.
template <VkFormatFeatureFlags VkFormatProperties::*features>
VkFormatFeatureFlags Renderer::getFormatFeatureBits(angle::FormatID formatID,
                                                    const VkFormatFeatureFlags featureBits) const
{
    ASSERT(formatID != angle::FormatID::NONE);
    VkFormatProperties &deviceProperties = mFormatProperties[formatID];

    if (deviceProperties.bufferFeatures == kInvalidFormatFeatureFlags)
    {
        // If we don't have the actual device features, see if the requested features are mandatory.
        // If so, there's no need to query the device.
        const VkFormatProperties &mandatoryProperties = vk::GetMandatoryFormatSupport(formatID);
        if (IsMaskFlagSet(mandatoryProperties.*features, featureBits))
        {
            // Note: returns without populating the cache entry; only the requested bits are known.
            return featureBits;
        }

        if (vk::IsYUVExternalFormat(formatID))
        {
            // External YUV formats can't be queried through vkGetPhysicalDeviceFormatProperties;
            // their features come from the external format table instead.
            const vk::ExternalYuvFormatInfo &externalFormatInfo =
                mExternalFormatTable.getExternalFormatInfo(formatID);
            deviceProperties.optimalTilingFeatures = externalFormatInfo.formatFeatures;
        }
        else
        {
            VkFormat vkFormat = vk::GetVkFormatFromFormatID(formatID);
            ASSERT(vkFormat != VK_FORMAT_UNDEFINED);

            // Otherwise query the format features and cache it.
            vkGetPhysicalDeviceFormatProperties(mPhysicalDevice, vkFormat, &deviceProperties);
            // Workaround for some Android devices that don't indicate filtering
            // support on D16_UNORM and they should.
            if (mFeatures.forceD16TexFilter.enabled && vkFormat == VK_FORMAT_D16_UNORM)
            {
                deviceProperties.*features |= VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT;
            }
        }
    }

    return deviceProperties.*features & featureBits;
}
5549 
// True iff ALL of the requested |featureBits| are supported, per getFormatFeatureBits.
template <VkFormatFeatureFlags VkFormatProperties::*features>
bool Renderer::hasFormatFeatureBits(angle::FormatID formatID,
                                    const VkFormatFeatureFlags featureBits) const
{
    return IsMaskFlagSet(getFormatFeatureBits<features>(formatID, featureBits), featureBits);
}
5556 
haveSameFormatFeatureBits(angle::FormatID formatID1,angle::FormatID formatID2) const5557 bool Renderer::haveSameFormatFeatureBits(angle::FormatID formatID1, angle::FormatID formatID2) const
5558 {
5559     if (formatID1 == angle::FormatID::NONE || formatID2 == angle::FormatID::NONE)
5560     {
5561         return false;
5562     }
5563 
5564     constexpr VkFormatFeatureFlags kImageUsageFeatureBits =
5565         VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT | VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT |
5566         VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT;
5567 
5568     VkFormatFeatureFlags fmt1LinearFeatureBits =
5569         getLinearImageFormatFeatureBits(formatID1, kImageUsageFeatureBits);
5570     VkFormatFeatureFlags fmt1OptimalFeatureBits =
5571         getImageFormatFeatureBits(formatID1, kImageUsageFeatureBits);
5572 
5573     return hasLinearImageFormatFeatureBits(formatID2, fmt1LinearFeatureBits) &&
5574            hasImageFormatFeatureBits(formatID2, fmt1OptimalFeatureBits);
5575 }
5576 
// Destroys garbage whose associated submissions have completed.  Order matters: see the
// note about orphaned buffer blocks below.
void Renderer::cleanupGarbage()
{
    // Clean up general garbage
    mSharedGarbageList.cleanupSubmittedGarbage(this);
    // Clean up suballocation garbages
    mSuballocationGarbageList.cleanupSubmittedGarbage(this);
    // Note: do this after clean up mSuballocationGarbageList so that we will have more chances to
    // find orphaned blocks being empty.
    mOrphanedBufferBlockList.pruneEmptyBufferBlocks(this);
    // Clean up RefCountedEvent that are done resetting
    mRefCountedEventRecycler.cleanupResettingEvents(this);
}
5589 
// Re-examines garbage that was queued before its commands were submitted.
void Renderer::cleanupPendingSubmissionGarbage()
{
    // Check if pending garbage is still pending. If not, move them to the garbage list.
    mSharedGarbageList.cleanupUnsubmittedGarbage(this);
    mSuballocationGarbageList.cleanupUnsubmittedGarbage(this);
}
5596 
// Records the most recent validation-layer message and bumps the running message count.
// Both are consumed by getAndClearLastValidationMessage.
void Renderer::onNewValidationMessage(const std::string &message)
{
    mLastValidationMessage = message;
    ++mValidationMessageCount;
}
5602 
// Marks that framebuffer fetch has been used.  (No code in this file resets the flag.)
void Renderer::onFramebufferFetchUsed()
{
    mIsFramebufferFetchUsed = true;
}
5607 
getAndClearLastValidationMessage(uint32_t * countSinceLastClear)5608 std::string Renderer::getAndClearLastValidationMessage(uint32_t *countSinceLastClear)
5609 {
5610     *countSinceLastClear    = mValidationMessageCount;
5611     mValidationMessageCount = 0;
5612 
5613     return std::move(mLastValidationMessage);
5614 }
5615 
getMaxFenceWaitTimeNs() const5616 uint64_t Renderer::getMaxFenceWaitTimeNs() const
5617 {
5618     constexpr uint64_t kMaxFenceWaitTimeNs = std::numeric_limits<uint64_t>::max();
5619 
5620     return kMaxFenceWaitTimeNs;
5621 }
5622 
setGlobalDebugAnnotator(bool * installedAnnotatorOut)5623 void Renderer::setGlobalDebugAnnotator(bool *installedAnnotatorOut)
5624 {
5625     // Install one of two DebugAnnotator classes:
5626     //
5627     // 1) The global class enables basic ANGLE debug functionality (e.g. Vulkan validation errors
5628     //    will cause dEQP tests to fail).
5629     //
5630     // 2) The DebugAnnotatorVk class processes OpenGL ES commands that the application uses.  It is
5631     //    installed for the following purposes:
5632     //
5633     //    1) To enable calling the vkCmd*DebugUtilsLabelEXT functions in order to communicate to
5634     //       debuggers (e.g. AGI) the OpenGL ES commands that the application uses.  In addition to
5635     //       simply installing DebugAnnotatorVk, also enable calling vkCmd*DebugUtilsLabelEXT.
5636     //
5637     //    2) To enable logging to Android logcat the OpenGL ES commands that the application uses.
5638     bool installDebugAnnotatorVk = false;
5639 
5640     // Enable calling the vkCmd*DebugUtilsLabelEXT functions if the vkCmd*DebugUtilsLabelEXT
5641     // functions exist, and if the kEnableDebugMarkersVarName environment variable is set.
5642     if (vkCmdBeginDebugUtilsLabelEXT)
5643     {
5644         // Use the GetAndSet variant to improve future lookup times
5645         std::string enabled = angle::GetAndSetEnvironmentVarOrUnCachedAndroidProperty(
5646             kEnableDebugMarkersVarName, kEnableDebugMarkersPropertyName);
5647         if (!enabled.empty() && enabled.compare("0") != 0)
5648         {
5649             mAngleDebuggerMode      = true;
5650             installDebugAnnotatorVk = true;
5651         }
5652     }
5653 #if defined(ANGLE_ENABLE_TRACE_ANDROID_LOGCAT)
5654     // Only install DebugAnnotatorVk to log all API commands to Android's logcat.
5655     installDebugAnnotatorVk = true;
5656 #endif
5657 
5658     {
5659         if (installDebugAnnotatorVk)
5660         {
5661             std::unique_lock<angle::SimpleMutex> lock(gl::GetDebugMutex());
5662             gl::InitializeDebugAnnotations(&mAnnotator);
5663         }
5664     }
5665 
5666     *installedAnnotatorOut = installDebugAnnotatorVk;
5667 }
5668 
// Re-points volk's process-global dispatch tables at this Renderer's instance and device
// if a different instance/device has been loaded since.  No-op unless Vulkan is loaded
// through the shared-library (volk) path.
void Renderer::reloadVolkIfNeeded() const
{
#if defined(ANGLE_SHARED_LIBVULKAN)
    // Reload only when the currently loaded handle differs from ours.
    if ((mInstance != VK_NULL_HANDLE) && (volkGetLoadedInstance() != mInstance))
    {
        volkLoadInstance(mInstance);
    }

    if ((mDevice != VK_NULL_HANDLE) && (volkGetLoadedDevice() != mDevice))
    {
        volkLoadDevice(mDevice);
    }

    // Extension entry points that alias core functions must be refreshed after (re)loading.
    initializeInstanceExtensionEntryPointsFromCore();
    initializeDeviceExtensionEntryPointsFromCore();
#endif  // defined(ANGLE_SHARED_LIBVULKAN)
}
5686 
initializeInstanceExtensionEntryPointsFromCore() const5687 void Renderer::initializeInstanceExtensionEntryPointsFromCore() const
5688 {
5689     // Initialize extension entry points from core ones.  In some cases, such as VMA, the extension
5690     // entry point is unconditionally used.
5691     InitGetPhysicalDeviceProperties2KHRFunctionsFromCore();
5692     if (mFeatures.supportsExternalFenceCapabilities.enabled)
5693     {
5694         InitExternalFenceCapabilitiesFunctionsFromCore();
5695     }
5696     if (mFeatures.supportsExternalSemaphoreCapabilities.enabled)
5697     {
5698         InitExternalSemaphoreCapabilitiesFunctionsFromCore();
5699     }
5700 }
5701 
initializeDeviceExtensionEntryPointsFromCore() const5702 void Renderer::initializeDeviceExtensionEntryPointsFromCore() const
5703 {
5704     if (mFeatures.supportsGetMemoryRequirements2.enabled)
5705     {
5706         InitGetMemoryRequirements2KHRFunctionsFromCore();
5707     }
5708     if (mFeatures.supportsBindMemory2.enabled)
5709     {
5710         InitBindMemory2KHRFunctionsFromCore();
5711     }
5712     if (mFeatures.supportsYUVSamplerConversion.enabled)
5713     {
5714         InitSamplerYcbcrKHRFunctionsFromCore();
5715     }
5716 }
5717 
// Submits accumulated commands for the given protection type and priority.  If
// |signalSemaphore| is non-null it is signaled by this submission.  |externalFence|, when
// provided, is copied locally so ownership can be moved into the queue while the caller's
// reference stays valid.
angle::Result Renderer::submitCommands(vk::Context *context,
                                       vk::ProtectionType protectionType,
                                       egl::ContextPriority contextPriority,
                                       const vk::Semaphore *signalSemaphore,
                                       const vk::SharedExternalFence *externalFence,
                                       const QueueSerial &submitQueueSerial)
{
    ASSERT(signalSemaphore == nullptr || signalSemaphore->valid());
    const VkSemaphore signalVkSemaphore =
        signalSemaphore ? signalSemaphore->getHandle() : VK_NULL_HANDLE;

    // Copy the shared fence so it can be moved into whichever queue handles the submit.
    vk::SharedExternalFence externalFenceCopy;
    if (externalFence != nullptr)
    {
        externalFenceCopy = *externalFence;
    }

    // Either enqueue the submission for the async command processor thread, or submit
    // directly on this thread.
    if (isAsyncCommandQueueEnabled())
    {
        ANGLE_TRY(mCommandProcessor.enqueueSubmitCommands(
            context, protectionType, contextPriority, signalVkSemaphore,
            std::move(externalFenceCopy), submitQueueSerial));
    }
    else
    {
        ANGLE_TRY(mCommandQueue.submitCommands(context, protectionType, contextPriority,
                                               signalVkSemaphore, std::move(externalFenceCopy),
                                               submitQueueSerial));
    }

    // Post-submit bookkeeping runs in both modes.
    ANGLE_TRY(mCommandQueue.postSubmitCheck(context));

    return angle::Result::Continue;
}
5752 
// Creates a semaphore dependency so that work already submitted at |srcContextPriority|
// completes before subsequent work at |dstContextPriority| on the same serial index.
// Used when a context's queue priority changes.
angle::Result Renderer::submitPriorityDependency(vk::Context *context,
                                                 vk::ProtectionTypes protectionTypes,
                                                 egl::ContextPriority srcContextPriority,
                                                 egl::ContextPriority dstContextPriority,
                                                 SerialIndex index)
{
    RendererScoped<vk::ReleasableResource<vk::Semaphore>> semaphore(this);
    ANGLE_VK_TRY(context, semaphore.get().get().init(mDevice));

    // First, submit already flushed commands / wait semaphores into the source Priority VkQueue.
    // Commands that are in the Secondary Command Buffers will be flushed into the new VkQueue.

    // Submit commands and attach Signal Semaphore.
    ASSERT(protectionTypes.any());
    while (protectionTypes.any())
    {
        vk::ProtectionType protectionType = protectionTypes.first();
        protectionTypes.reset(protectionType);

        QueueSerial queueSerial(index, generateQueueSerial(index));
        // Submit semaphore only if this is the last submission (all into the same VkQueue).
        const vk::Semaphore *signalSemaphore = nullptr;
        if (protectionTypes.none())
        {
            // Update QueueSerial to collect semaphore using the latest possible queueSerial.
            semaphore.get().setQueueSerial(queueSerial);
            signalSemaphore = &semaphore.get().get();
        }
        ANGLE_TRY(submitCommands(context, protectionType, srcContextPriority, signalSemaphore,
                                 nullptr, queueSerial));
    }

    // Submit only Wait Semaphore into the destination Priority (VkQueue).
    QueueSerial queueSerial(index, generateQueueSerial(index));
    semaphore.get().setQueueSerial(queueSerial);
    ANGLE_TRY(queueSubmitWaitSemaphore(context, dstContextPriority, semaphore.get().get(),
                                       VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, queueSerial));

    return angle::Result::Continue;
}
5793 
handleDeviceLost()5794 void Renderer::handleDeviceLost()
5795 {
5796     if (isAsyncCommandQueueEnabled())
5797     {
5798         mCommandProcessor.handleDeviceLost(this);
5799     }
5800     else
5801     {
5802         mCommandQueue.handleDeviceLost(this);
5803     }
5804 }
5805 
finishResourceUse(vk::Context * context,const vk::ResourceUse & use)5806 angle::Result Renderer::finishResourceUse(vk::Context *context, const vk::ResourceUse &use)
5807 {
5808     if (isAsyncCommandQueueEnabled())
5809     {
5810         ANGLE_TRY(mCommandProcessor.waitForResourceUseToBeSubmitted(context, use));
5811     }
5812     return mCommandQueue.finishResourceUse(context, use, getMaxFenceWaitTimeNs());
5813 }
5814 
finishQueueSerial(vk::Context * context,const QueueSerial & queueSerial)5815 angle::Result Renderer::finishQueueSerial(vk::Context *context, const QueueSerial &queueSerial)
5816 {
5817     ASSERT(queueSerial.valid());
5818     if (isAsyncCommandQueueEnabled())
5819     {
5820         ANGLE_TRY(mCommandProcessor.waitForQueueSerialToBeSubmitted(context, queueSerial));
5821     }
5822     return mCommandQueue.finishQueueSerial(context, queueSerial, getMaxFenceWaitTimeNs());
5823 }
5824 
waitForResourceUseToFinishWithUserTimeout(vk::Context * context,const vk::ResourceUse & use,uint64_t timeout,VkResult * result)5825 angle::Result Renderer::waitForResourceUseToFinishWithUserTimeout(vk::Context *context,
5826                                                                   const vk::ResourceUse &use,
5827                                                                   uint64_t timeout,
5828                                                                   VkResult *result)
5829 {
5830     ANGLE_TRACE_EVENT0("gpu.angle", "Renderer::waitForResourceUseToFinishWithUserTimeout");
5831     if (isAsyncCommandQueueEnabled())
5832     {
5833         ANGLE_TRY(mCommandProcessor.waitForResourceUseToBeSubmitted(context, use));
5834     }
5835     return mCommandQueue.waitForResourceUseToFinishWithUserTimeout(context, use, timeout, result);
5836 }
5837 
flushWaitSemaphores(vk::ProtectionType protectionType,egl::ContextPriority priority,std::vector<VkSemaphore> && waitSemaphores,std::vector<VkPipelineStageFlags> && waitSemaphoreStageMasks)5838 angle::Result Renderer::flushWaitSemaphores(
5839     vk::ProtectionType protectionType,
5840     egl::ContextPriority priority,
5841     std::vector<VkSemaphore> &&waitSemaphores,
5842     std::vector<VkPipelineStageFlags> &&waitSemaphoreStageMasks)
5843 {
5844     ANGLE_TRACE_EVENT0("gpu.angle", "Renderer::flushWaitSemaphores");
5845     if (isAsyncCommandQueueEnabled())
5846     {
5847         ANGLE_TRY(mCommandProcessor.enqueueFlushWaitSemaphores(protectionType, priority,
5848                                                                std::move(waitSemaphores),
5849                                                                std::move(waitSemaphoreStageMasks)));
5850     }
5851     else
5852     {
5853         mCommandQueue.flushWaitSemaphores(protectionType, priority, std::move(waitSemaphores),
5854                                           std::move(waitSemaphoreStageMasks));
5855     }
5856 
5857     return angle::Result::Continue;
5858 }
5859 
flushRenderPassCommands(vk::Context * context,vk::ProtectionType protectionType,egl::ContextPriority priority,const vk::RenderPass & renderPass,VkFramebuffer framebufferOverride,vk::RenderPassCommandBufferHelper ** renderPassCommands)5860 angle::Result Renderer::flushRenderPassCommands(
5861     vk::Context *context,
5862     vk::ProtectionType protectionType,
5863     egl::ContextPriority priority,
5864     const vk::RenderPass &renderPass,
5865     VkFramebuffer framebufferOverride,
5866     vk::RenderPassCommandBufferHelper **renderPassCommands)
5867 {
5868     ANGLE_TRACE_EVENT0("gpu.angle", "Renderer::flushRenderPassCommands");
5869     if (isAsyncCommandQueueEnabled())
5870     {
5871         ANGLE_TRY(mCommandProcessor.enqueueFlushRenderPassCommands(
5872             context, protectionType, priority, renderPass, framebufferOverride,
5873             renderPassCommands));
5874     }
5875     else
5876     {
5877         ANGLE_TRY(mCommandQueue.flushRenderPassCommands(context, protectionType, priority,
5878                                                         renderPass, framebufferOverride,
5879                                                         renderPassCommands));
5880     }
5881 
5882     return angle::Result::Continue;
5883 }
5884 
flushOutsideRPCommands(vk::Context * context,vk::ProtectionType protectionType,egl::ContextPriority priority,vk::OutsideRenderPassCommandBufferHelper ** outsideRPCommands)5885 angle::Result Renderer::flushOutsideRPCommands(
5886     vk::Context *context,
5887     vk::ProtectionType protectionType,
5888     egl::ContextPriority priority,
5889     vk::OutsideRenderPassCommandBufferHelper **outsideRPCommands)
5890 {
5891     ANGLE_TRACE_EVENT0("gpu.angle", "Renderer::flushOutsideRPCommands");
5892     if (isAsyncCommandQueueEnabled())
5893     {
5894         ANGLE_TRY(mCommandProcessor.enqueueFlushOutsideRPCommands(context, protectionType, priority,
5895                                                                   outsideRPCommands));
5896     }
5897     else
5898     {
5899         ANGLE_TRY(mCommandQueue.flushOutsideRPCommands(context, protectionType, priority,
5900                                                        outsideRPCommands));
5901     }
5902 
5903     return angle::Result::Continue;
5904 }
5905 
queuePresent(vk::Context * context,egl::ContextPriority priority,const VkPresentInfoKHR & presentInfo,vk::SwapchainStatus * swapchainStatus)5906 void Renderer::queuePresent(vk::Context *context,
5907                             egl::ContextPriority priority,
5908                             const VkPresentInfoKHR &presentInfo,
5909                             vk::SwapchainStatus *swapchainStatus)
5910 {
5911     if (isAsyncCommandQueueEnabled())
5912     {
5913         mCommandProcessor.enqueuePresent(priority, presentInfo, swapchainStatus);
5914         // lastPresentResult should always VK_SUCCESS when isPending is true
5915         ASSERT(!swapchainStatus->isPending || swapchainStatus->lastPresentResult == VK_SUCCESS);
5916     }
5917     else
5918     {
5919         mCommandQueue.queuePresent(priority, presentInfo, swapchainStatus);
5920         ASSERT(!swapchainStatus->isPending);
5921     }
5922 
5923     if (getFeatures().logMemoryReportStats.enabled)
5924     {
5925         mMemoryReport.logMemoryReportStats();
5926     }
5927 }
5928 
// Shared implementation for acquiring a secondary command buffer helper of either kind
// (render-pass or outside-render-pass) from the matching recycler.
template <typename CommandBufferHelperT, typename RecyclerT>
angle::Result Renderer::getCommandBufferImpl(vk::Context *context,
                                             vk::SecondaryCommandPool *commandPool,
                                             vk::SecondaryCommandMemoryAllocator *commandsAllocator,
                                             RecyclerT *recycler,
                                             CommandBufferHelperT **commandBufferHelperOut)
{
    return recycler->getCommandBufferHelper(context, commandPool, commandsAllocator,
                                            commandBufferHelperOut);
}
5939 
// Acquires an outside-render-pass command buffer helper from its recycler.
angle::Result Renderer::getOutsideRenderPassCommandBufferHelper(
    vk::Context *context,
    vk::SecondaryCommandPool *commandPool,
    vk::SecondaryCommandMemoryAllocator *commandsAllocator,
    vk::OutsideRenderPassCommandBufferHelper **commandBufferHelperOut)
{
    ANGLE_TRACE_EVENT0("gpu.angle", "Renderer::getOutsideRenderPassCommandBufferHelper");
    return getCommandBufferImpl(context, commandPool, commandsAllocator,
                                &mOutsideRenderPassCommandBufferRecycler, commandBufferHelperOut);
}
5950 
// Acquires a render-pass command buffer helper from its recycler.
angle::Result Renderer::getRenderPassCommandBufferHelper(
    vk::Context *context,
    vk::SecondaryCommandPool *commandPool,
    vk::SecondaryCommandMemoryAllocator *commandsAllocator,
    vk::RenderPassCommandBufferHelper **commandBufferHelperOut)
{
    ANGLE_TRACE_EVENT0("gpu.angle", "Renderer::getRenderPassCommandBufferHelper");
    return getCommandBufferImpl(context, commandPool, commandsAllocator,
                                &mRenderPassCommandBufferRecycler, commandBufferHelperOut);
}
5961 
// Returns an outside-render-pass command buffer helper to its recycler for reuse.
void Renderer::recycleOutsideRenderPassCommandBufferHelper(
    vk::OutsideRenderPassCommandBufferHelper **commandBuffer)
{
    ANGLE_TRACE_EVENT0("gpu.angle", "Renderer::recycleOutsideRenderPassCommandBufferHelper");
    mOutsideRenderPassCommandBufferRecycler.recycleCommandBufferHelper(commandBuffer);
}
5968 
// Returns a render-pass command buffer helper to its recycler for reuse.
void Renderer::recycleRenderPassCommandBufferHelper(
    vk::RenderPassCommandBufferHelper **commandBuffer)
{
    ANGLE_TRACE_EVENT0("gpu.angle", "Renderer::recycleRenderPassCommandBufferHelper");
    mRenderPassCommandBufferRecycler.recycleCommandBufferHelper(commandBuffer);
}
5975 
logCacheStats() const5976 void Renderer::logCacheStats() const
5977 {
5978     if (!vk::kOutputCumulativePerfCounters)
5979     {
5980         return;
5981     }
5982 
5983     std::unique_lock<angle::SimpleMutex> localLock(mCacheStatsMutex);
5984 
5985     int cacheType = 0;
5986     INFO() << "Vulkan object cache hit ratios: ";
5987     for (const CacheStats &stats : mVulkanCacheStats)
5988     {
5989         INFO() << "    CacheType " << cacheType++ << ": " << stats.getHitRatio();
5990     }
5991 }
5992 
getFormatDescriptorCountForVkFormat(vk::Context * context,VkFormat format,uint32_t * descriptorCountOut)5993 angle::Result Renderer::getFormatDescriptorCountForVkFormat(vk::Context *context,
5994                                                             VkFormat format,
5995                                                             uint32_t *descriptorCountOut)
5996 {
5997     if (mVkFormatDescriptorCountMap.count(format) == 0)
5998     {
5999         // Query device for descriptor count with basic values for most of
6000         // VkPhysicalDeviceImageFormatInfo2 members.
6001         VkPhysicalDeviceImageFormatInfo2 imageFormatInfo = {};
6002         imageFormatInfo.sType  = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2;
6003         imageFormatInfo.format = format;
6004         imageFormatInfo.type   = VK_IMAGE_TYPE_2D;
6005         imageFormatInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
6006         imageFormatInfo.usage  = VK_IMAGE_USAGE_SAMPLED_BIT;
6007         imageFormatInfo.flags  = 0;
6008 
6009         VkImageFormatProperties imageFormatProperties                            = {};
6010         VkSamplerYcbcrConversionImageFormatProperties ycbcrImageFormatProperties = {};
6011         ycbcrImageFormatProperties.sType =
6012             VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES;
6013 
6014         VkImageFormatProperties2 imageFormatProperties2 = {};
6015         imageFormatProperties2.sType                 = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2;
6016         imageFormatProperties2.pNext                 = &ycbcrImageFormatProperties;
6017         imageFormatProperties2.imageFormatProperties = imageFormatProperties;
6018 
6019         ANGLE_VK_TRY(context, vkGetPhysicalDeviceImageFormatProperties2(
6020                                   mPhysicalDevice, &imageFormatInfo, &imageFormatProperties2));
6021 
6022         mVkFormatDescriptorCountMap[format] =
6023             ycbcrImageFormatProperties.combinedImageSamplerDescriptorCount;
6024     }
6025 
6026     ASSERT(descriptorCountOut);
6027     *descriptorCountOut = mVkFormatDescriptorCountMap[format];
6028     return angle::Result::Continue;
6029 }
6030 
getFormatDescriptorCountForExternalFormat(vk::Context * context,uint64_t format,uint32_t * descriptorCountOut)6031 angle::Result Renderer::getFormatDescriptorCountForExternalFormat(vk::Context *context,
6032                                                                   uint64_t format,
6033                                                                   uint32_t *descriptorCountOut)
6034 {
6035     ASSERT(descriptorCountOut);
6036 
6037     // TODO: need to query for external formats as well once spec is fixed. http://anglebug.com/6141
6038     ANGLE_VK_CHECK(context, getFeatures().useMultipleDescriptorsForExternalFormats.enabled,
6039                    VK_ERROR_INCOMPATIBLE_DRIVER);
6040 
6041     // Vulkan spec has a gap in that there is no mechanism available to query the immutable
6042     // sampler descriptor count of an external format. For now, return a default value.
6043     constexpr uint32_t kExternalFormatDefaultDescriptorCount = 4;
6044     *descriptorCountOut = kExternalFormatDefaultDescriptorCount;
6045     return angle::Result::Continue;
6046 }
6047 
onAllocateHandle(vk::HandleType handleType)6048 void Renderer::onAllocateHandle(vk::HandleType handleType)
6049 {
6050     std::unique_lock<angle::SimpleMutex> localLock(mActiveHandleCountsMutex);
6051     mActiveHandleCounts.onAllocate(handleType);
6052 }
6053 
onDeallocateHandle(vk::HandleType handleType)6054 void Renderer::onDeallocateHandle(vk::HandleType handleType)
6055 {
6056     std::unique_lock<angle::SimpleMutex> localLock(mActiveHandleCountsMutex);
6057     mActiveHandleCounts.onDeallocate(handleType);
6058 }
6059 
getPreferedBufferBlockSize(uint32_t memoryTypeIndex) const6060 VkDeviceSize Renderer::getPreferedBufferBlockSize(uint32_t memoryTypeIndex) const
6061 {
6062     // Try not to exceed 1/64 of heap size to begin with.
6063     const VkDeviceSize heapSize = getMemoryProperties().getHeapSizeForMemoryType(memoryTypeIndex);
6064     return std::min(heapSize / 64, mPreferredLargeHeapBlockSize);
6065 }
6066 
allocateScopedQueueSerialIndex(vk::ScopedQueueSerialIndex * indexOut)6067 angle::Result Renderer::allocateScopedQueueSerialIndex(vk::ScopedQueueSerialIndex *indexOut)
6068 {
6069     SerialIndex index;
6070     ANGLE_TRY(allocateQueueSerialIndex(&index));
6071     indexOut->init(index, &mQueueSerialIndexAllocator);
6072     return angle::Result::Continue;
6073 }
6074 
allocateQueueSerialIndex(SerialIndex * serialIndexOut)6075 angle::Result Renderer::allocateQueueSerialIndex(SerialIndex *serialIndexOut)
6076 {
6077     *serialIndexOut = mQueueSerialIndexAllocator.allocate();
6078     if (*serialIndexOut == kInvalidQueueSerialIndex)
6079     {
6080         return angle::Result::Stop;
6081     }
6082     return angle::Result::Continue;
6083 }
6084 
// Returns a serial index previously obtained from allocateQueueSerialIndex to the allocator.
void Renderer::releaseQueueSerialIndex(SerialIndex index)
{
    mQueueSerialIndexAllocator.release(index);
}
6089 
// Finishes (waits on) at most one in-flight command batch and cleans it up.
// |anyBatchCleaned| presumably reports whether a batch was retired — see
// CommandQueue::finishOneCommandBatchAndCleanup for the exact contract.
angle::Result Renderer::finishOneCommandBatchAndCleanup(vk::Context *context, bool *anyBatchCleaned)
{
    return mCommandQueue.finishOneCommandBatchAndCleanup(context, getMaxFenceWaitTimeNs(),
                                                         anyBatchCleaned);
}
6095 
// static
// Returns a human-readable name for |type|, delegating to the shared helper.
const char *Renderer::GetVulkanObjectTypeName(VkObjectType type)
{
    return GetVkObjectTypeName(type);
}
6101 
// ImageMemorySuballocator holds no state requiring explicit construction or teardown.
ImageMemorySuballocator::ImageMemorySuballocator() {}
ImageMemorySuballocator::~ImageMemorySuballocator() {}
6104 
// Intentionally a no-op: this suballocator owns no per-renderer resources to release.
void ImageMemorySuballocator::destroy(Renderer *renderer) {}
6106 
// Allocates memory for |image| through VMA and binds it.  On success, the allocation, its
// memory-type index, its size, and its property flags are returned via the out parameters,
// and the allocation is recorded with the renderer for memory tracking.
// NOTE(review): |imageCreateInfo| is not used in this path.
VkResult ImageMemorySuballocator::allocateAndBindMemory(
    Context *context,
    Image *image,
    const VkImageCreateInfo *imageCreateInfo,
    VkMemoryPropertyFlags requiredFlags,
    VkMemoryPropertyFlags preferredFlags,
    const VkMemoryRequirements *memoryRequirements,
    const bool allocateDedicatedMemory,
    MemoryAllocationType memoryAllocationType,
    Allocation *allocationOut,
    VkMemoryPropertyFlags *memoryFlagsOut,
    uint32_t *memoryTypeIndexOut,
    VkDeviceSize *sizeOut)
{
    ASSERT(image && image->valid());
    ASSERT(allocationOut && !allocationOut->valid());
    Renderer *renderer         = context->getRenderer();
    const Allocator &allocator = renderer->getAllocator();

    // Avoid device-local and host-visible combinations if possible. Here, "preferredFlags" is
    // expected to be the same as "requiredFlags" except in the device-local bit.
    ASSERT((preferredFlags & ~VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) ==
           (requiredFlags & ~VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT));

    // When device-local is both required and preferred, exclude host-visible memory types
    // from the candidate set.
    uint32_t memoryTypeBits = memoryRequirements->memoryTypeBits;
    if ((requiredFlags & preferredFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
    {
        memoryTypeBits = GetMemoryTypeBitsExcludingHostVisible(renderer, preferredFlags,
                                                               memoryRequirements->memoryTypeBits);
    }

    // Allocate and bind memory for the image. Try allocating on the device first.
    VkResult result = vma::AllocateAndBindMemoryForImage(
        allocator.getHandle(), &image->mHandle, requiredFlags, preferredFlags, memoryTypeBits,
        allocateDedicatedMemory, &allocationOut->mHandle, memoryTypeIndexOut, sizeOut);

    // We need to get the property flags of the allocated memory if successful.
    if (result == VK_SUCCESS)
    {
        *memoryFlagsOut =
            renderer->getMemoryProperties().getMemoryType(*memoryTypeIndexOut).propertyFlags;

        // Record the allocation for the renderer's memory usage tracking.
        renderer->onMemoryAlloc(memoryAllocationType, *sizeOut, *memoryTypeIndexOut,
                                allocationOut->getHandle());
    }
    return result;
}
6154 
// Maps |allocation|, fills its first |size| bytes with |value|, and unmaps.  For memory
// types that are not host-coherent, an explicit flush is issued so the writes become
// visible to the device.
VkResult ImageMemorySuballocator::mapMemoryAndInitWithNonZeroValue(Renderer *renderer,
                                                                   Allocation *allocation,
                                                                   VkDeviceSize size,
                                                                   int value,
                                                                   VkMemoryPropertyFlags flags)
{
    ASSERT(allocation && allocation->valid());
    const Allocator &allocator = renderer->getAllocator();

    void *mappedMemoryData;
    VkResult result = vma::MapMemory(allocator.getHandle(), allocation->mHandle, &mappedMemoryData);
    if (result != VK_SUCCESS)
    {
        return result;
    }

    memset(mappedMemoryData, value, static_cast<size_t>(size));
    vma::UnmapMemory(allocator.getHandle(), allocation->mHandle);

    // If the memory type is not host coherent, we perform an explicit flush.
    // NOTE(review): the flush's result is discarded — confirm a flush failure here is
    // acceptable to ignore.
    if ((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) == 0)
    {
        vma::FlushAllocation(allocator.getHandle(), allocation->mHandle, 0, VK_WHOLE_SIZE);
    }

    return VK_SUCCESS;
}
6182 
// Returns whether an image of |size| bytes should get a dedicated VkDeviceMemory
// allocation instead of a suballocation, based on a fixed size threshold.
bool ImageMemorySuballocator::needsDedicatedMemory(VkDeviceSize size) const
{
    return size >= kImageSizeThresholdForDedicatedMemoryAllocation;
}
6187 
6188 }  // namespace vk
6189 }  // namespace rx
6190