//
// Copyright 2016 The ANGLE Project Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// RendererVk.h:
//    Defines the class interface for RendererVk.
//

#ifndef LIBANGLE_RENDERER_VULKAN_RENDERERVK_H_
#define LIBANGLE_RENDERER_VULKAN_RENDERERVK_H_

#include <condition_variable>
#include <deque>
#include <memory>
#include <mutex>
#include <queue>
#include <thread>

#include "common/PackedEnums.h"
#include "common/WorkerThread.h"
#include "common/angleutils.h"
#include "common/vulkan/vk_headers.h"
#include "common/vulkan/vulkan_icd.h"
#include "libANGLE/BlobCache.h"
#include "libANGLE/Caps.h"
#include "libANGLE/renderer/vulkan/CommandProcessor.h"
#include "libANGLE/renderer/vulkan/DebugAnnotatorVk.h"
#include "libANGLE/renderer/vulkan/MemoryTracking.h"
#include "libANGLE/renderer/vulkan/QueryVk.h"
#include "libANGLE/renderer/vulkan/ResourceVk.h"
#include "libANGLE/renderer/vulkan/UtilsVk.h"
#include "libANGLE/renderer/vulkan/vk_format_utils.h"
#include "libANGLE/renderer/vulkan/vk_helpers.h"
#include "libANGLE/renderer/vulkan/vk_internal_shaders_autogen.h"
#include "libANGLE/renderer/vulkan/vk_mem_alloc_wrapper.h"

namespace angle
{
class Library;
struct FrontendFeatures;
}  // namespace angle

namespace egl
{
class Display;
class BlobCache;
}  // namespace egl

namespace rx
{
class DisplayVk;
class FramebufferVk;

namespace vk
{
class Format;

static constexpr size_t kMaxExtensionNames = 400;
using ExtensionNameList                    = angle::FixedVector<const char *, kMaxExtensionNames>;

// Information used to accurately skip known synchronization issues in ANGLE.
struct SkippedSyncvalMessage
{
    const char *messageId;
    const char *messageContents1;
    const char *messageContents2                      = "";
    bool isDueToNonConformantCoherentFramebufferFetch = false;
};

class ImageMemorySuballocator : angle::NonCopyable
{
  public:
    ImageMemorySuballocator();
    ~ImageMemorySuballocator();

    void destroy(RendererVk *renderer);

    // Allocates memory for the image and binds it.
    VkResult allocateAndBindMemory(Context *context,
                                   Image *image,
                                   const VkImageCreateInfo *imageCreateInfo,
                                   VkMemoryPropertyFlags requiredFlags,
                                   VkMemoryPropertyFlags preferredFlags,
                                   MemoryAllocationType memoryAllocationType,
                                   Allocation *allocationOut,
                                   VkMemoryPropertyFlags *memoryFlagsOut,
                                   uint32_t *memoryTypeIndexOut,
                                   VkDeviceSize *sizeOut);

    // Maps the memory to initialize with non-zero value.
    VkResult mapMemoryAndInitWithNonZeroValue(RendererVk *renderer,
                                              Allocation *allocation,
                                              VkDeviceSize size,
                                              int value,
                                              VkMemoryPropertyFlags flags);
};
}  // namespace vk

// Supports one semaphore from current surface, and one semaphore passed to
// glSignalSemaphoreEXT.
using SignalSemaphoreVector = angle::FixedVector<VkSemaphore, 2>;

// Recursive function to process variable arguments for garbage collection
inline void CollectGarbage(std::vector<vk::GarbageObject> *garbageOut) {}
template <typename ArgT, typename... ArgsT>
void CollectGarbage(std::vector<vk::GarbageObject> *garbageOut, ArgT object, ArgsT... objectsIn)
{
    if (object->valid())
    {
        garbageOut->emplace_back(vk::GarbageObject::Get(object));
    }
    CollectGarbage(garbageOut, objectsIn...);
}

// Recursive function to process variable arguments for garbage destroy
inline void DestroyGarbage(VkDevice device) {}
template <typename ArgT, typename... ArgsT>
void DestroyGarbage(VkDevice device, ArgT object, ArgsT... objectsIn)
{
    if (object->valid())
    {
        object->destroy(device);
    }
    DestroyGarbage(device, objectsIn...);
}
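
// Example usage of CollectGarbage()/DestroyGarbage() (hypothetical objects, for illustration
// only). The empty overloads above terminate the recursion once all arguments are consumed.
//
//     std::vector<vk::GarbageObject> garbage;
//     CollectGarbage(&garbage, &imageView, &sampler);  // records only objects that are valid()
//     DestroyGarbage(device, &imageView, &sampler);    // destroys valid() objects immediately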

class WaitableCompressEvent
{
  public:
    WaitableCompressEvent(std::shared_ptr<angle::WaitableEvent> waitableEvent)
        : mWaitableEvent(waitableEvent)
    {}

    virtual ~WaitableCompressEvent() {}

    void wait() { return mWaitableEvent->wait(); }

    bool isReady() { return mWaitableEvent->isReady(); }

  private:
    std::shared_ptr<angle::WaitableEvent> mWaitableEvent;
};

class OneOffCommandPool : angle::NonCopyable
{
  public:
    OneOffCommandPool();
    void init(vk::ProtectionType protectionType);
    angle::Result getCommandBuffer(vk::Context *context,
                                   vk::PrimaryCommandBuffer *commandBufferOut);
    void releaseCommandBuffer(const QueueSerial &submitQueueSerial,
                              vk::PrimaryCommandBuffer &&primary);
    void destroy(VkDevice device);

  private:
    vk::ProtectionType mProtectionType;
    std::mutex mMutex;
    vk::CommandPool mCommandPool;
    struct PendingOneOffCommands
    {
        vk::ResourceUse use;
        vk::PrimaryCommandBuffer commandBuffer;
    };
    std::deque<PendingOneOffCommands> mPendingCommands;
};
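
// A one-off command buffer is acquired from this pool through RendererVk::getCommandBufferOneOff(),
// submitted immediately through RendererVk::queueSubmitOneOff(), and reclaimed by the pool via
// releaseCommandBuffer() once a QueueSerial has been associated with the submission.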

class RendererVk : angle::NonCopyable
{
  public:
    RendererVk();
    ~RendererVk();

    angle::Result initialize(DisplayVk *displayVk,
                             egl::Display *display,
                             const char *wsiExtension,
                             const char *wsiLayer);
    // Reload volk vk* function ptrs if needed for an already initialized RendererVk
    void reloadVolkIfNeeded() const;
    void onDestroy(vk::Context *context);

    void notifyDeviceLost();
    bool isDeviceLost() const;
    bool hasSharedGarbage();

    std::string getVendorString() const;
    std::string getRendererDescription() const;
    std::string getVersionString(bool includeFullVersion) const;

    gl::Version getMaxSupportedESVersion() const;
    gl::Version getMaxConformantESVersion() const;

    uint32_t getDeviceVersion();
    VkInstance getInstance() const { return mInstance; }
    VkPhysicalDevice getPhysicalDevice() const { return mPhysicalDevice; }
    const VkPhysicalDeviceProperties &getPhysicalDeviceProperties() const
    {
        return mPhysicalDeviceProperties;
    }
    const VkPhysicalDeviceDrmPropertiesEXT &getPhysicalDeviceDrmProperties() const
    {
        return mDrmProperties;
    }
    const VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT &
    getPhysicalDevicePrimitivesGeneratedQueryFeatures() const
    {
        return mPrimitivesGeneratedQueryFeatures;
    }
    const VkPhysicalDeviceFeatures &getPhysicalDeviceFeatures() const
    {
        return mPhysicalDeviceFeatures;
    }
    const VkPhysicalDeviceFeatures2KHR &getEnabledFeatures() const { return mEnabledFeatures; }
    VkDevice getDevice() const { return mDevice; }

    bool isVulkan11Instance() const;
    bool isVulkan11Device() const;

    const vk::Allocator &getAllocator() const { return mAllocator; }
    vk::ImageMemorySuballocator &getImageMemorySuballocator() { return mImageMemorySuballocator; }

    angle::Result selectPresentQueueForSurface(DisplayVk *displayVk,
                                               VkSurfaceKHR surface,
                                               uint32_t *presentQueueOut);

    const gl::Caps &getNativeCaps() const;
    const gl::TextureCapsMap &getNativeTextureCaps() const;
    const gl::Extensions &getNativeExtensions() const;
    const gl::Limitations &getNativeLimitations() const;
    const ShPixelLocalStorageOptions &getNativePixelLocalStorageOptions() const;
    void initializeFrontendFeatures(angle::FrontendFeatures *features) const;

    uint32_t getQueueFamilyIndex() const { return mCurrentQueueFamilyIndex; }
    const VkQueueFamilyProperties &getQueueFamilyProperties() const
    {
        return mQueueFamilyProperties[mCurrentQueueFamilyIndex];
    }

    const vk::MemoryProperties &getMemoryProperties() const { return mMemoryProperties; }

    const vk::Format &getFormat(GLenum internalFormat) const
    {
        return mFormatTable[internalFormat];
    }

    const vk::Format &getFormat(angle::FormatID formatID) const { return mFormatTable[formatID]; }

    angle::Result getPipelineCacheSize(DisplayVk *displayVk, size_t *pipelineCacheSizeOut);
    angle::Result syncPipelineCacheVk(DisplayVk *displayVk, const gl::Context *context);

    const angle::FeaturesVk &getFeatures() const { return mFeatures; }
    uint32_t getMaxVertexAttribDivisor() const { return mMaxVertexAttribDivisor; }
    VkDeviceSize getMaxVertexAttribStride() const { return mMaxVertexAttribStride; }

    uint32_t getDefaultUniformBufferSize() const { return mDefaultUniformBufferSize; }

    angle::vk::ICD getEnabledICD() const { return mEnabledICD; }
    bool isMockICDEnabled() const { return mEnabledICD == angle::vk::ICD::Mock; }

    // Query the format properties for select bits (linearTilingFeatures, optimalTilingFeatures
    // and bufferFeatures).  Looks through mandatory features first, and falls back to querying
    // the device (first time only).
    bool hasLinearImageFormatFeatureBits(angle::FormatID format,
                                         const VkFormatFeatureFlags featureBits) const;
    VkFormatFeatureFlags getLinearImageFormatFeatureBits(
        angle::FormatID format,
        const VkFormatFeatureFlags featureBits) const;
    VkFormatFeatureFlags getImageFormatFeatureBits(angle::FormatID format,
                                                   const VkFormatFeatureFlags featureBits) const;
    bool hasImageFormatFeatureBits(angle::FormatID format,
                                   const VkFormatFeatureFlags featureBits) const;
    bool hasBufferFormatFeatureBits(angle::FormatID format,
                                    const VkFormatFeatureFlags featureBits) const;
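
    // Example (illustrative only; the chosen format and feature bit are arbitrary):
    //
    //     const bool isRenderable = renderer->hasImageFormatFeatureBits(
    //         angle::FormatID::R8G8B8A8_UNORM, VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT);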

    bool isAsyncCommandQueueEnabled() const { return mFeatures.asyncCommandQueue.enabled; }
    bool isAsyncCommandBufferResetEnabled() const
    {
        return mFeatures.asyncCommandBufferReset.enabled;
    }

    ANGLE_INLINE egl::ContextPriority getDriverPriority(egl::ContextPriority priority)
    {
        return mCommandQueue.getDriverPriority(priority);
    }
    ANGLE_INLINE uint32_t getDeviceQueueIndex() { return mCommandQueue.getDeviceQueueIndex(); }

    VkQueue getQueue(egl::ContextPriority priority) { return mCommandQueue.getQueue(priority); }

    // This command buffer should be submitted immediately via queueSubmitOneOff.
    angle::Result getCommandBufferOneOff(vk::Context *context,
                                         vk::ProtectionType protectionType,
                                         vk::PrimaryCommandBuffer *commandBufferOut)
    {
        return mOneOffCommandPoolMap[protectionType].getCommandBuffer(context, commandBufferOut);
    }

    // Fire off a single command buffer immediately with default priority.
    // Command buffer must be allocated with getCommandBufferOneOff and is reclaimed.
    angle::Result queueSubmitOneOff(vk::Context *context,
                                    vk::PrimaryCommandBuffer &&primary,
                                    vk::ProtectionType protectionType,
                                    egl::ContextPriority priority,
                                    VkSemaphore waitSemaphore,
                                    VkPipelineStageFlags waitSemaphoreStageMasks,
                                    vk::SubmitPolicy submitPolicy,
                                    QueueSerial *queueSerialOut);

    angle::Result queueSubmitWaitSemaphore(vk::Context *context,
                                           egl::ContextPriority priority,
                                           const vk::Semaphore &waitSemaphore,
                                           VkPipelineStageFlags waitSemaphoreStageMasks,
                                           QueueSerial submitQueueSerial);

    template <typename... ArgsT>
    void collectGarbage(const vk::ResourceUse &use, ArgsT... garbageIn)
    {
        if (hasResourceUseFinished(use))
        {
            DestroyGarbage(mDevice, garbageIn...);
        }
        else
        {
            std::vector<vk::GarbageObject> sharedGarbage;
            CollectGarbage(&sharedGarbage, garbageIn...);
            if (!sharedGarbage.empty())
            {
                collectGarbage(use, std::move(sharedGarbage));
            }
        }
    }

    void collectAllocationGarbage(const vk::ResourceUse &use, vk::Allocation &allocationGarbageIn)
    {
        if (!allocationGarbageIn.valid())
        {
            return;
        }

        if (hasResourceUseFinished(use))
        {
            allocationGarbageIn.destroy(getAllocator());
        }
        else
        {
            std::vector<vk::GarbageObject> sharedGarbage;
            CollectGarbage(&sharedGarbage, &allocationGarbageIn);
            if (!sharedGarbage.empty())
            {
                collectGarbage(use, std::move(sharedGarbage));
            }
        }
    }

    void collectGarbage(const vk::ResourceUse &use, vk::GarbageList &&sharedGarbage)
    {
        ASSERT(!sharedGarbage.empty());
        vk::SharedGarbage garbage(use, std::move(sharedGarbage));
        if (!hasResourceUseSubmitted(use))
        {
            std::unique_lock<std::mutex> lock(mGarbageMutex);
            mPendingSubmissionGarbage.push(std::move(garbage));
        }
        else if (!garbage.destroyIfComplete(this))
        {
            std::unique_lock<std::mutex> lock(mGarbageMutex);
            mSharedGarbage.push(std::move(garbage));
        }
    }

    void collectSuballocationGarbage(const vk::ResourceUse &use,
                                     vk::BufferSuballocation &&suballocation,
                                     vk::Buffer &&buffer)
    {
        if (hasResourceUseFinished(use))
        {
            // mSuballocationGarbageDestroyed is atomic, so we don't need mGarbageMutex to
            // protect it.
            mSuballocationGarbageDestroyed += suballocation.getSize();
            buffer.destroy(mDevice);
            suballocation.destroy(this);
        }
        else
        {
            std::unique_lock<std::mutex> lock(mGarbageMutex);
            if (hasResourceUseSubmitted(use))
            {
                mSuballocationGarbageSizeInBytes += suballocation.getSize();
                mSuballocationGarbage.emplace(use, std::move(suballocation), std::move(buffer));
            }
            else
            {
                mPendingSubmissionSuballocationGarbage.emplace(use, std::move(suballocation),
                                                               std::move(buffer));
            }
        }
    }

    angle::Result getPipelineCache(vk::PipelineCacheAccess *pipelineCacheOut);
    angle::Result mergeIntoPipelineCache(const vk::PipelineCache &pipelineCache);

    void onNewValidationMessage(const std::string &message);
    std::string getAndClearLastValidationMessage(uint32_t *countSinceLastClear);

    const std::vector<const char *> &getSkippedValidationMessages() const
    {
        return mSkippedValidationMessages;
    }
    const std::vector<vk::SkippedSyncvalMessage> &getSkippedSyncvalMessages() const
    {
        return mSkippedSyncvalMessages;
    }

    void onFramebufferFetchUsed();
    bool isFramebufferFetchUsed() const { return mIsFramebufferFetchUsed; }

    uint64_t getMaxFenceWaitTimeNs() const;

    ANGLE_INLINE bool isCommandQueueBusy()
    {
        if (isAsyncCommandQueueEnabled())
        {
            return mCommandProcessor.isBusy(this);
        }
        else
        {
            return mCommandQueue.isBusy(this);
        }
    }

    angle::Result waitForResourceUseToBeSubmittedToDevice(vk::Context *context,
                                                          const vk::ResourceUse &use)
    {
        // This is only needed for async submission code path. For immediate submission, it is a nop
        // since everything is submitted immediately.
        if (isAsyncCommandQueueEnabled())
        {
            ASSERT(mCommandProcessor.hasResourceUseEnqueued(use));
            return mCommandProcessor.waitForResourceUseToBeSubmitted(context, use);
        }
        // This ResourceUse must have been submitted.
        ASSERT(mCommandQueue.hasResourceUseSubmitted(use));
        return angle::Result::Continue;
    }

    angle::Result waitForQueueSerialToBeSubmittedToDevice(vk::Context *context,
                                                          const QueueSerial &queueSerial)
    {
        // This is only needed for async submission code path. For immediate submission, it is a nop
        // since everything is submitted immediately.
        if (isAsyncCommandQueueEnabled())
        {
            ASSERT(mCommandProcessor.hasQueueSerialEnqueued(queueSerial));
            return mCommandProcessor.waitForQueueSerialToBeSubmitted(context, queueSerial);
        }
        // This queueSerial must have been submitted.
        ASSERT(mCommandQueue.hasQueueSerialSubmitted(queueSerial));
        return angle::Result::Continue;
    }

    angle::VulkanPerfCounters getCommandQueuePerfCounters()
    {
        return mCommandQueue.getPerfCounters();
    }
    void resetCommandQueuePerFrameCounters() { mCommandQueue.resetPerFramePerfCounters(); }

    egl::Display *getDisplay() const { return mDisplay; }

    bool enableDebugUtils() const { return mEnableDebugUtils; }
    bool angleDebuggerMode() const { return mAngleDebuggerMode; }

    SamplerCache &getSamplerCache() { return mSamplerCache; }
    SamplerYcbcrConversionCache &getYuvConversionCache() { return mYuvConversionCache; }

    void onAllocateHandle(vk::HandleType handleType);
    void onDeallocateHandle(vk::HandleType handleType);

    bool getEnableValidationLayers() const { return mEnableValidationLayers; }

    vk::ResourceSerialFactory &getResourceSerialFactory() { return mResourceSerialFactory; }

    void setGlobalDebugAnnotator();

    void outputVmaStatString();

    bool haveSameFormatFeatureBits(angle::FormatID formatID1, angle::FormatID formatID2) const;

    void cleanupGarbage();
    void cleanupPendingSubmissionGarbage();

    angle::Result submitCommands(vk::Context *context,
                                 vk::ProtectionType protectionType,
                                 egl::ContextPriority contextPriority,
                                 const vk::Semaphore *signalSemaphore,
                                 const vk::SharedExternalFence *externalFence,
                                 const QueueSerial &submitQueueSerial);

    angle::Result submitPriorityDependency(vk::Context *context,
                                           vk::ProtectionTypes protectionTypes,
                                           egl::ContextPriority srcContextPriority,
                                           egl::ContextPriority dstContextPriority,
                                           SerialIndex index);

    void handleDeviceLost();
    angle::Result finishResourceUse(vk::Context *context, const vk::ResourceUse &use);
    angle::Result finishQueueSerial(vk::Context *context, const QueueSerial &queueSerial);
    angle::Result waitForResourceUseToFinishWithUserTimeout(vk::Context *context,
                                                            const vk::ResourceUse &use,
                                                            uint64_t timeout,
                                                            VkResult *result);
    angle::Result checkCompletedCommands(vk::Context *context);
    angle::Result retireFinishedCommands(vk::Context *context);

    angle::Result flushWaitSemaphores(vk::ProtectionType protectionType,
                                      egl::ContextPriority priority,
                                      std::vector<VkSemaphore> &&waitSemaphores,
                                      std::vector<VkPipelineStageFlags> &&waitSemaphoreStageMasks);
    angle::Result flushRenderPassCommands(vk::Context *context,
                                          vk::ProtectionType protectionType,
                                          egl::ContextPriority priority,
                                          const vk::RenderPass &renderPass,
                                          vk::RenderPassCommandBufferHelper **renderPassCommands);
    angle::Result flushOutsideRPCommands(
        vk::Context *context,
        vk::ProtectionType protectionType,
        egl::ContextPriority priority,
        vk::OutsideRenderPassCommandBufferHelper **outsideRPCommands);

    void queuePresent(vk::Context *context,
                      egl::ContextPriority priority,
                      const VkPresentInfoKHR &presentInfo,
                      vk::SwapchainStatus *swapchainStatus);

    // Only useful if async submission is enabled
    angle::Result waitForPresentToBeSubmitted(vk::SwapchainStatus *swapchainStatus);

    angle::Result getOutsideRenderPassCommandBufferHelper(
        vk::Context *context,
        vk::SecondaryCommandPool *commandPool,
        vk::SecondaryCommandMemoryAllocator *commandsAllocator,
        vk::OutsideRenderPassCommandBufferHelper **commandBufferHelperOut);
    angle::Result getRenderPassCommandBufferHelper(
        vk::Context *context,
        vk::SecondaryCommandPool *commandPool,
        vk::SecondaryCommandMemoryAllocator *commandsAllocator,
        vk::RenderPassCommandBufferHelper **commandBufferHelperOut);

    void recycleOutsideRenderPassCommandBufferHelper(
        vk::OutsideRenderPassCommandBufferHelper **commandBuffer);
    void recycleRenderPassCommandBufferHelper(vk::RenderPassCommandBufferHelper **commandBuffer);

    // Process GPU memory reports
    void processMemoryReportCallback(const VkDeviceMemoryReportCallbackDataEXT &callbackData)
    {
        bool logCallback = getFeatures().logMemoryReportCallbacks.enabled;
        mMemoryReport.processCallback(callbackData, logCallback);
    }

    // Accumulate cache stats for a specific cache
    void accumulateCacheStats(VulkanCacheType cache, const CacheStats &stats)
    {
        std::unique_lock<std::mutex> localLock(mCacheStatsMutex);
        mVulkanCacheStats[cache].accumulate(stats);
    }
    // Log cache stats for all caches
    void logCacheStats() const;

    VkPipelineStageFlags getSupportedVulkanPipelineStageMask() const
    {
        return mSupportedVulkanPipelineStageMask;
    }

    VkShaderStageFlags getSupportedVulkanShaderStageMask() const
    {
        return mSupportedVulkanShaderStageMask;
    }

    angle::Result getFormatDescriptorCountForVkFormat(ContextVk *contextVk,
                                                      VkFormat format,
                                                      uint32_t *descriptorCountOut);

    angle::Result getFormatDescriptorCountForExternalFormat(ContextVk *contextVk,
                                                            uint64_t format,
                                                            uint32_t *descriptorCountOut);

    VkDeviceSize getMaxCopyBytesUsingCPUWhenPreservingBufferData() const
    {
        return mMaxCopyBytesUsingCPUWhenPreservingBufferData;
    }

    const vk::ExtensionNameList &getEnabledInstanceExtensions() const
    {
        return mEnabledInstanceExtensions;
    }

    const vk::ExtensionNameList &getEnabledDeviceExtensions() const
    {
        return mEnabledDeviceExtensions;
    }

    VkDeviceSize getPreferedBufferBlockSize(uint32_t memoryTypeIndex) const;

    size_t getDefaultBufferAlignment() const { return mDefaultBufferAlignment; }

    uint32_t getStagingBufferMemoryTypeIndex(vk::MemoryCoherency coherency) const
    {
        return coherency == vk::MemoryCoherency::Coherent
                   ? mCoherentStagingBufferMemoryTypeIndex
                   : mNonCoherentStagingBufferMemoryTypeIndex;
    }
    size_t getStagingBufferAlignment() const { return mStagingBufferAlignment; }

    uint32_t getVertexConversionBufferMemoryTypeIndex(vk::MemoryHostVisibility hostVisibility) const
    {
        return hostVisibility == vk::MemoryHostVisibility::Visible
                   ? mHostVisibleVertexConversionBufferMemoryTypeIndex
                   : mDeviceLocalVertexConversionBufferMemoryTypeIndex;
    }
    size_t getVertexConversionBufferAlignment() const { return mVertexConversionBufferAlignment; }

    uint32_t getDeviceLocalMemoryTypeIndex() const
    {
        return mDeviceLocalVertexConversionBufferMemoryTypeIndex;
    }

    void addBufferBlockToOrphanList(vk::BufferBlock *block);
    void pruneOrphanedBufferBlocks();

    bool isShadingRateSupported(gl::ShadingRate shadingRate) const
    {
        return mSupportedFragmentShadingRates.test(shadingRate);
    }

    VkDeviceSize getSuballocationDestroyedSize() const
    {
        return mSuballocationGarbageDestroyed.load(std::memory_order_consume);
    }
    void onBufferPoolPrune() { mSuballocationGarbageDestroyed = 0; }
    VkDeviceSize getSuballocationGarbageSize() const
    {
        return mSuballocationGarbageSizeInBytesCachedAtomic.load(std::memory_order_consume);
    }
    size_t getPendingSubmissionGarbageSize() const
    {
        std::unique_lock<std::mutex> lock(mGarbageMutex);
        return mPendingSubmissionGarbage.size();
    }

    ANGLE_INLINE VkFilter getPreferredFilterForYUV(VkFilter defaultFilter)
    {
        return getFeatures().preferLinearFilterForYUV.enabled ? VK_FILTER_LINEAR : defaultFilter;
    }

    // Convenience helpers to check for dynamic state ANGLE features which depend on the more
    // encompassing feature for support of the relevant extension.  When the extension-support
    // feature is disabled, the derived dynamic state is automatically disabled.
    ANGLE_INLINE bool useVertexInputBindingStrideDynamicState()
    {
        return getFeatures().supportsExtendedDynamicState.enabled &&
               getFeatures().useVertexInputBindingStrideDynamicState.enabled;
    }
    ANGLE_INLINE bool useCullModeDynamicState()
    {
        return getFeatures().supportsExtendedDynamicState.enabled &&
               getFeatures().useCullModeDynamicState.enabled;
    }
    ANGLE_INLINE bool useDepthCompareOpDynamicState()
    {
        return getFeatures().supportsExtendedDynamicState.enabled &&
               getFeatures().useDepthCompareOpDynamicState.enabled;
    }
    ANGLE_INLINE bool useDepthTestEnableDynamicState()
    {
        return getFeatures().supportsExtendedDynamicState.enabled &&
               getFeatures().useDepthTestEnableDynamicState.enabled;
    }
    ANGLE_INLINE bool useDepthWriteEnableDynamicState()
    {
        return getFeatures().supportsExtendedDynamicState.enabled &&
               getFeatures().useDepthWriteEnableDynamicState.enabled;
    }
    ANGLE_INLINE bool useFrontFaceDynamicState()
    {
        return getFeatures().supportsExtendedDynamicState.enabled &&
               getFeatures().useFrontFaceDynamicState.enabled;
    }
    ANGLE_INLINE bool useStencilOpDynamicState()
    {
        return getFeatures().supportsExtendedDynamicState.enabled &&
               getFeatures().useStencilOpDynamicState.enabled;
    }
    ANGLE_INLINE bool useStencilTestEnableDynamicState()
    {
        return getFeatures().supportsExtendedDynamicState.enabled &&
               getFeatures().useStencilTestEnableDynamicState.enabled;
    }
    ANGLE_INLINE bool usePrimitiveRestartEnableDynamicState()
    {
        return getFeatures().supportsExtendedDynamicState2.enabled &&
               getFeatures().usePrimitiveRestartEnableDynamicState.enabled;
    }
    ANGLE_INLINE bool useRasterizerDiscardEnableDynamicState()
    {
        return getFeatures().supportsExtendedDynamicState2.enabled &&
               getFeatures().useRasterizerDiscardEnableDynamicState.enabled;
    }
    ANGLE_INLINE bool useDepthBiasEnableDynamicState()
    {
        return getFeatures().supportsExtendedDynamicState2.enabled &&
               getFeatures().useDepthBiasEnableDynamicState.enabled;
    }
    ANGLE_INLINE bool useLogicOpDynamicState()
    {
        return getFeatures().supportsExtendedDynamicState2.enabled &&
               getFeatures().supportsLogicOpDynamicState.enabled;
    }

    angle::Result allocateScopedQueueSerialIndex(vk::ScopedQueueSerialIndex *indexOut);
    angle::Result allocateQueueSerialIndex(SerialIndex *serialIndexOut);
    size_t getLargestQueueSerialIndexEverAllocated() const
    {
        return mQueueSerialIndexAllocator.getLargestIndexEverAllocated();
    }
    void releaseQueueSerialIndex(SerialIndex index);
    Serial generateQueueSerial(SerialIndex index);
    void reserveQueueSerials(SerialIndex index,
                             size_t count,
                             RangedSerialFactory *rangedSerialFactory);

    // Return true if all serials in ResourceUse have been submitted.
    bool hasResourceUseSubmitted(const vk::ResourceUse &use) const;
    bool hasQueueSerialSubmitted(const QueueSerial &queueSerial) const;
    Serial getLastSubmittedSerial(SerialIndex index) const;
    // Return true if all serials in ResourceUse have been finished.
    bool hasResourceUseFinished(const vk::ResourceUse &use) const;
    bool hasQueueSerialFinished(const QueueSerial &queueSerial) const;

    // Memory statistics can be updated on allocation and deallocation.
    template <typename HandleT>
    void onMemoryAlloc(vk::MemoryAllocationType allocType,
                       VkDeviceSize size,
                       uint32_t memoryTypeIndex,
                       HandleT handle)
    {
        mMemoryAllocationTracker.onMemoryAllocImpl(allocType, size, memoryTypeIndex,
                                                   reinterpret_cast<void *>(handle));
    }

    template <typename HandleT>
    void onMemoryDealloc(vk::MemoryAllocationType allocType,
                         VkDeviceSize size,
                         uint32_t memoryTypeIndex,
                         HandleT handle)
    {
        mMemoryAllocationTracker.onMemoryDeallocImpl(allocType, size, memoryTypeIndex,
                                                     reinterpret_cast<void *>(handle));
    }

    MemoryAllocationTracker *getMemoryAllocationTracker() { return &mMemoryAllocationTracker; }

    void requestAsyncCommandsAndGarbageCleanup(vk::Context *context);

    // Try to finish a command batch from the queue and free garbage memory in the event of an OOM
    // error.
    angle::Result finishOneCommandBatchAndCleanup(vk::Context *context, bool *anyBatchCleaned);

    // Static function to get Vulkan object type name.
    static const char *GetVulkanObjectTypeName(VkObjectType type);

  private:
    angle::Result initializeDevice(DisplayVk *displayVk, uint32_t queueFamilyIndex);
    void ensureCapsInitialized() const;
    void initializeValidationMessageSuppressions();

    void queryDeviceExtensionFeatures(const vk::ExtensionNameList &deviceExtensionNames);
    void appendDeviceExtensionFeaturesNotPromoted(const vk::ExtensionNameList &deviceExtensionNames,
                                                  VkPhysicalDeviceFeatures2KHR *deviceFeatures,
                                                  VkPhysicalDeviceProperties2 *deviceProperties);
    void appendDeviceExtensionFeaturesPromotedTo11(
        const vk::ExtensionNameList &deviceExtensionNames,
        VkPhysicalDeviceFeatures2KHR *deviceFeatures,
        VkPhysicalDeviceProperties2 *deviceProperties);
    void appendDeviceExtensionFeaturesPromotedTo12(
        const vk::ExtensionNameList &deviceExtensionNames,
        VkPhysicalDeviceFeatures2KHR *deviceFeatures,
        VkPhysicalDeviceProperties2 *deviceProperties);
    void appendDeviceExtensionFeaturesPromotedTo13(
        const vk::ExtensionNameList &deviceExtensionNames,
        VkPhysicalDeviceFeatures2KHR *deviceFeatures,
        VkPhysicalDeviceProperties2 *deviceProperties);

    angle::Result enableInstanceExtensions(DisplayVk *displayVk,
                                           const VulkanLayerVector &enabledInstanceLayerNames,
                                           const char *wsiExtension,
                                           bool canLoadDebugUtils);
    angle::Result enableDeviceExtensions(DisplayVk *displayVk,
                                         const VulkanLayerVector &enabledDeviceLayerNames);

    void enableDeviceExtensionsNotPromoted(const vk::ExtensionNameList &deviceExtensionNames);
    void enableDeviceExtensionsPromotedTo11(const vk::ExtensionNameList &deviceExtensionNames);
    void enableDeviceExtensionsPromotedTo12(const vk::ExtensionNameList &deviceExtensionNames);
    void enableDeviceExtensionsPromotedTo13(const vk::ExtensionNameList &deviceExtensionNames);

    void initInstanceExtensionEntryPoints();
    void initDeviceExtensionEntryPoints();
    // Initialize extension entry points from core ones if needed
    void initializeInstanceExtensionEntryPointsFromCore() const;
    void initializeDeviceExtensionEntryPointsFromCore() const;

    void initFeatures(DisplayVk *display, const vk::ExtensionNameList &extensions);
    void appBasedFeatureOverrides(DisplayVk *display, const vk::ExtensionNameList &extensions);
    angle::Result initPipelineCache(DisplayVk *display,
                                    vk::PipelineCache *pipelineCache,
                                    bool *success);

    template <VkFormatFeatureFlags VkFormatProperties::*features>
    VkFormatFeatureFlags getFormatFeatureBits(angle::FormatID formatID,
                                              const VkFormatFeatureFlags featureBits) const;

    template <VkFormatFeatureFlags VkFormatProperties::*features>
    bool hasFormatFeatureBits(angle::FormatID formatID,
                              const VkFormatFeatureFlags featureBits) const;

    // Initialize VMA allocator and buffer suballocator related data.
    angle::Result initializeMemoryAllocator(DisplayVk *displayVk);

    // Query and cache supported fragment shading rates
    bool canSupportFragmentShadingRate(const vk::ExtensionNameList &deviceExtensionNames);
    // Prefer host-visible device-local memory over plain device-local memory, based on the
    // device type and heap size.
    bool canPreferDeviceLocalMemoryHostVisible(VkPhysicalDeviceType deviceType);

    template <typename CommandBufferHelperT, typename RecyclerT>
    angle::Result getCommandBufferImpl(vk::Context *context,
                                       vk::SecondaryCommandPool *commandPool,
                                       vk::SecondaryCommandMemoryAllocator *commandsAllocator,
                                       RecyclerT *recycler,
                                       CommandBufferHelperT **commandBufferHelperOut);

    egl::Display *mDisplay;

    void *mLibVulkanLibrary;

    mutable bool mCapsInitialized;
    mutable gl::Caps mNativeCaps;
    mutable gl::TextureCapsMap mNativeTextureCaps;
    mutable gl::Extensions mNativeExtensions;
    mutable gl::Limitations mNativeLimitations;
    mutable ShPixelLocalStorageOptions mNativePLSOptions;
    mutable angle::FeaturesVk mFeatures;

    // The instance and device versions.  The instance version is the one from the Vulkan loader,
    // while the device version comes from VkPhysicalDeviceProperties::apiVersion.  With instance
    // version 1.0, only device version 1.0 can be used.  If instance version is at least 1.1, any
    // device version (even higher than that) can be used.  Some extensions have been promoted to
    // Vulkan 1.1 or higher, but the version check must be done against the instance or device
    // version, depending on whether it's an instance or device extension.
    //
    // Note that mDeviceVersion is technically redundant with mPhysicalDeviceProperties.apiVersion,
    // but ANGLE may use a smaller version with problematic ICDs.
    uint32_t mInstanceVersion;
    uint32_t mDeviceVersion;

    VkInstance mInstance;
    bool mEnableValidationLayers;
    // True if ANGLE is enabling the VK_EXT_debug_utils extension.
    bool mEnableDebugUtils;
    // True if ANGLE should call the vkCmd*DebugUtilsLabelEXT functions in order to communicate
    // to debuggers (e.g. AGI) the OpenGL ES commands that the application uses.  This is
    // independent of mEnableDebugUtils, as an external graphics debugger can enable the
    // VK_EXT_debug_utils extension and cause this to be set true.
    bool mAngleDebuggerMode;
    angle::vk::ICD mEnabledICD;
    VkDebugUtilsMessengerEXT mDebugUtilsMessenger;
    VkPhysicalDevice mPhysicalDevice;

    VkPhysicalDeviceProperties mPhysicalDeviceProperties;
    VkPhysicalDeviceVulkan11Properties mPhysicalDevice11Properties;

    VkPhysicalDeviceFeatures mPhysicalDeviceFeatures;
    VkPhysicalDeviceVulkan11Features mPhysicalDevice11Features;

    VkPhysicalDeviceLineRasterizationFeaturesEXT mLineRasterizationFeatures;
    VkPhysicalDeviceProvokingVertexFeaturesEXT mProvokingVertexFeatures;
    VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT mVertexAttributeDivisorFeatures;
    VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT mVertexAttributeDivisorProperties;
    VkPhysicalDeviceTransformFeedbackFeaturesEXT mTransformFeedbackFeatures;
    VkPhysicalDeviceIndexTypeUint8FeaturesEXT mIndexTypeUint8Features;
    VkPhysicalDeviceSubgroupProperties mSubgroupProperties;
    VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR mSubgroupExtendedTypesFeatures;
    VkPhysicalDeviceDeviceMemoryReportFeaturesEXT mMemoryReportFeatures;
    VkDeviceDeviceMemoryReportCreateInfoEXT mMemoryReportCallback;
    VkPhysicalDeviceShaderFloat16Int8FeaturesKHR mShaderFloat16Int8Features;
    VkPhysicalDeviceDepthStencilResolvePropertiesKHR mDepthStencilResolveProperties;
    VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesGOOGLEX
        mMultisampledRenderToSingleSampledFeaturesGOOGLEX;
    VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT
        mMultisampledRenderToSingleSampledFeatures;
    VkPhysicalDeviceImage2DViewOf3DFeaturesEXT mImage2dViewOf3dFeatures;
    VkPhysicalDeviceMultiviewFeatures mMultiviewFeatures;
    VkPhysicalDeviceFeatures2KHR mEnabledFeatures;
    VkPhysicalDeviceMultiviewProperties mMultiviewProperties;
    VkPhysicalDeviceDriverPropertiesKHR mDriverProperties;
    VkPhysicalDeviceCustomBorderColorFeaturesEXT mCustomBorderColorFeatures;
    VkPhysicalDeviceProtectedMemoryFeatures mProtectedMemoryFeatures;
    VkPhysicalDeviceHostQueryResetFeaturesEXT mHostQueryResetFeatures;
    VkPhysicalDeviceDepthClampZeroOneFeaturesEXT mDepthClampZeroOneFeatures;
    VkPhysicalDeviceDepthClipEnableFeaturesEXT mDepthClipEnableFeatures;
    VkPhysicalDeviceDepthClipControlFeaturesEXT mDepthClipControlFeatures;
    VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT mPrimitivesGeneratedQueryFeatures;
    VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT mPrimitiveTopologyListRestartFeatures;
    VkPhysicalDeviceSamplerYcbcrConversionFeatures mSamplerYcbcrConversionFeatures;
    VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT mPipelineCreationCacheControlFeatures;
    VkPhysicalDeviceExtendedDynamicStateFeaturesEXT mExtendedDynamicStateFeatures;
    VkPhysicalDeviceExtendedDynamicState2FeaturesEXT mExtendedDynamicState2Features;
    VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT mGraphicsPipelineLibraryFeatures;
    VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT mGraphicsPipelineLibraryProperties;
    VkPhysicalDeviceFragmentShadingRateFeaturesKHR mFragmentShadingRateFeatures;
    VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT mFragmentShaderInterlockFeatures;
    VkPhysicalDeviceImagelessFramebufferFeaturesKHR mImagelessFramebufferFeatures;
    VkPhysicalDevicePipelineRobustnessFeaturesEXT mPipelineRobustnessFeatures;
    VkPhysicalDevicePipelineProtectedAccessFeaturesEXT mPipelineProtectedAccessFeatures;
    VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT
        mRasterizationOrderAttachmentAccessFeatures;
    VkPhysicalDeviceSwapchainMaintenance1FeaturesEXT mSwapchainMaintenance1Features;
    VkPhysicalDeviceLegacyDitheringFeaturesEXT mDitheringFeatures;
    VkPhysicalDeviceDrmPropertiesEXT mDrmProperties;
    VkPhysicalDeviceTimelineSemaphoreFeaturesKHR mTimelineSemaphoreFeatures;

    angle::PackedEnumBitSet<gl::ShadingRate, uint8_t> mSupportedFragmentShadingRates;
    std::vector<VkQueueFamilyProperties> mQueueFamilyProperties;
    uint32_t mMaxVertexAttribDivisor;
    uint32_t mCurrentQueueFamilyIndex;
    VkDeviceSize mMaxVertexAttribStride;
    uint32_t mDefaultUniformBufferSize;
    VkDevice mDevice;
    VkDeviceSize mMaxCopyBytesUsingCPUWhenPreservingBufferData;

    bool mDeviceLost;

    // We group garbage into four categories: mSharedGarbage is garbage that has already been
    // submitted to Vulkan, which we expect to finish in finite time; mPendingSubmissionGarbage is
    // garbage that is still referenced in recorded commands. Suballocations have their own
    // dedicated garbage lists as a performance optimization, since they tend to be the most
    // common garbage objects. All four groups of garbage share the same mutex lock.
    mutable std::mutex mGarbageMutex;
    vk::SharedGarbageList mSharedGarbage;
    vk::SharedGarbageList mPendingSubmissionGarbage;
    vk::SharedBufferSuballocationGarbageList mSuballocationGarbage;
    vk::SharedBufferSuballocationGarbageList mPendingSubmissionSuballocationGarbage;
    // Total suballocation garbage size in bytes.
    VkDeviceSize mSuballocationGarbageSizeInBytes;

    // Total bytes of suballocations that have been destroyed since the last prune call. This can
    // be accessed without mGarbageMutex, thus it needs to be atomic to avoid TSAN complaints.
    std::atomic<VkDeviceSize> mSuballocationGarbageDestroyed;
    // This is the cached value of mSuballocationGarbageSizeInBytes but is accessed with atomic
    // operation. This can be accessed from different threads without mGarbageMutex, so that
    // thread sanitizer won't complain.
    std::atomic<VkDeviceSize> mSuballocationGarbageSizeInBytesCachedAtomic;

    vk::FormatTable mFormatTable;
    // A cache of VkFormatProperties as queried from the device over time.
    mutable angle::FormatMap<VkFormatProperties> mFormatProperties;

    vk::Allocator mAllocator;

    // Used to allocate memory for images using VMA, utilizing suballocation.
    vk::ImageMemorySuballocator mImageMemorySuballocator;

    vk::MemoryProperties mMemoryProperties;
    VkDeviceSize mPreferredLargeHeapBlockSize;

    // The default alignment for BufferVk object
    size_t mDefaultBufferAlignment;
    // The cached memory type index for staging buffer that is host visible.
    uint32_t mCoherentStagingBufferMemoryTypeIndex;
    uint32_t mNonCoherentStagingBufferMemoryTypeIndex;
    size_t mStagingBufferAlignment;
    // For vertex conversion buffers
    uint32_t mHostVisibleVertexConversionBufferMemoryTypeIndex;
    uint32_t mDeviceLocalVertexConversionBufferMemoryTypeIndex;
    size_t mVertexConversionBufferAlignment;

    // Holds orphaned BufferBlocks when ShareGroup gets destroyed
    vk::BufferBlockPointerVector mOrphanedBufferBlocks;

    // All access to the pipeline cache is done through EGL objects so it is thread safe to not
    // use a lock.
    std::mutex mPipelineCacheMutex;
    vk::PipelineCache mPipelineCache;
    uint32_t mPipelineCacheVkUpdateTimeout;
    size_t mPipelineCacheSizeAtLastSync;
    bool mPipelineCacheInitialized;

    // Latest validation data for debug overlay.
    std::string mLastValidationMessage;
    uint32_t mValidationMessageCount;

    // Skipped validation messages.  The exact contents of the list depends on the availability
    // of certain extensions.
    std::vector<const char *> mSkippedValidationMessages;
    // Syncval skipped messages.  The exact contents of the list depends on the availability of
    // certain extensions.
    std::vector<vk::SkippedSyncvalMessage> mSkippedSyncvalMessages;

    // Whether framebuffer fetch has been used, for the purposes of more accurate syncval error
    // filtering.
    bool mIsFramebufferFetchUsed;

    // How close to VkPhysicalDeviceLimits::maxMemoryAllocationCount we allow ourselves to get
    static constexpr double kPercentMaxMemoryAllocationCount = 0.3;
    // How many objects to garbage collect before issuing a flush()
    uint32_t mGarbageCollectionFlushThreshold;

    // Only used for "one off" command buffers.
    angle::PackedEnumMap<vk::ProtectionType, OneOffCommandPool> mOneOffCommandPoolMap;

    // Synchronous Command Queue
    vk::CommandQueue mCommandQueue;

    // Async Command Queue
    vk::CommandProcessor mCommandProcessor;

    // Command buffer pool management.
    vk::CommandBufferRecycler<vk::OutsideRenderPassCommandBufferHelper>
        mOutsideRenderPassCommandBufferRecycler;
    vk::CommandBufferRecycler<vk::RenderPassCommandBufferHelper> mRenderPassCommandBufferRecycler;

    SamplerCache mSamplerCache;
    SamplerYcbcrConversionCache mYuvConversionCache;
    angle::HashMap<VkFormat, uint32_t> mVkFormatDescriptorCountMap;
    vk::ActiveHandleCounter mActiveHandleCounts;
    std::mutex mActiveHandleCountsMutex;

    // Tracks resource serials.
    vk::ResourceSerialFactory mResourceSerialFactory;

    // QueueSerial generator
    vk::QueueSerialIndexAllocator mQueueSerialIndexAllocator;
    std::array<AtomicSerialFactory, kMaxQueueSerialIndexCount> mQueueSerialFactory;

    // Application executable information
    VkApplicationInfo mApplicationInfo;
    // Process GPU memory reports
    vk::MemoryReport mMemoryReport;
    // Helpers for adding trace annotations
    DebugAnnotatorVk mAnnotator;

    // Stats about all Vulkan object caches
    VulkanCacheStats mVulkanCacheStats;
    mutable std::mutex mCacheStatsMutex;

    // A mask to filter out Vulkan pipeline stages that are not supported, applied in situations
    // where multiple stages are prespecified (for example with image layout transitions):
    //
    // - Excludes GEOMETRY if geometry shaders are not supported.
    // - Excludes TESSELLATION_CONTROL and TESSELLATION_EVALUATION if tessellation shaders are not
    //   supported.
    //
    // Note that this mask can have bits set that don't correspond to valid stages, so it's
    // strictly only useful for masking out unsupported stages in an otherwise valid set of
    // stages.
    VkPipelineStageFlags mSupportedVulkanPipelineStageMask;
    VkShaderStageFlags mSupportedVulkanShaderStageMask;

    // Use thread pool to compress cache data.
    std::shared_ptr<rx::WaitableCompressEvent> mCompressEvent;

    vk::ExtensionNameList mEnabledInstanceExtensions;
    vk::ExtensionNameList mEnabledDeviceExtensions;

    // Memory tracker for allocations and deallocations.
    MemoryAllocationTracker mMemoryAllocationTracker;
};

ANGLE_INLINE Serial RendererVk::generateQueueSerial(SerialIndex index)
{
    return mQueueSerialFactory[index].generate();
}

ANGLE_INLINE void RendererVk::reserveQueueSerials(SerialIndex index,
                                                  size_t count,
                                                  RangedSerialFactory *rangedSerialFactory)
{
    mQueueSerialFactory[index].reserve(rangedSerialFactory, count);
}

ANGLE_INLINE bool RendererVk::hasResourceUseSubmitted(const vk::ResourceUse &use) const
{
    if (isAsyncCommandQueueEnabled())
    {
        return mCommandProcessor.hasResourceUseEnqueued(use);
    }
    else
    {
        return mCommandQueue.hasResourceUseSubmitted(use);
    }
}

ANGLE_INLINE bool RendererVk::hasQueueSerialSubmitted(const QueueSerial &queueSerial) const
{
    if (isAsyncCommandQueueEnabled())
    {
        return mCommandProcessor.hasQueueSerialEnqueued(queueSerial);
    }
    else
    {
        return mCommandQueue.hasQueueSerialSubmitted(queueSerial);
    }
}

ANGLE_INLINE Serial RendererVk::getLastSubmittedSerial(SerialIndex index) const
{
    if (isAsyncCommandQueueEnabled())
    {
        return mCommandProcessor.getLastEnqueuedSerial(index);
    }
    else
    {
        return mCommandQueue.getLastSubmittedSerial(index);
    }
}

ANGLE_INLINE bool RendererVk::hasResourceUseFinished(const vk::ResourceUse &use) const
{
    return mCommandQueue.hasResourceUseFinished(use);
}

ANGLE_INLINE bool RendererVk::hasQueueSerialFinished(const QueueSerial &queueSerial) const
{
    return mCommandQueue.hasQueueSerialFinished(queueSerial);
}

ANGLE_INLINE angle::Result RendererVk::waitForPresentToBeSubmitted(
    vk::SwapchainStatus *swapchainStatus)
{
    if (isAsyncCommandQueueEnabled())
    {
        return mCommandProcessor.waitForPresentToBeSubmitted(swapchainStatus);
    }
    ASSERT(!swapchainStatus->isPending);
    return angle::Result::Continue;
}

ANGLE_INLINE void RendererVk::requestAsyncCommandsAndGarbageCleanup(vk::Context *context)
{
    mCommandProcessor.requestCommandsAndGarbageCleanup();
}

ANGLE_INLINE angle::Result RendererVk::checkCompletedCommands(vk::Context *context)
{
    return mCommandQueue.checkAndCleanupCompletedCommands(context);
}

ANGLE_INLINE angle::Result RendererVk::retireFinishedCommands(vk::Context *context)
{
    return mCommandQueue.retireFinishedCommands(context);
}
}  // namespace rx

#endif  // LIBANGLE_RENDERER_VULKAN_RENDERERVK_H_