• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 //
2 // Copyright 2016 The ANGLE Project Authors. All rights reserved.
3 // Use of this source code is governed by a BSD-style license that can be
4 // found in the LICENSE file.
5 //
6 // vk_utils:
7 //    Helper functions for the Vulkan Renderer.
8 //
9 
10 #ifndef LIBANGLE_RENDERER_VULKAN_VK_UTILS_H_
11 #define LIBANGLE_RENDERER_VULKAN_VK_UTILS_H_
12 
13 #include <atomic>
14 #include <limits>
15 #include <queue>
16 
17 #include "GLSLANG/ShaderLang.h"
18 #include "common/FixedVector.h"
19 #include "common/Optional.h"
20 #include "common/PackedEnums.h"
21 #include "common/debug.h"
22 #include "libANGLE/Error.h"
23 #include "libANGLE/Observer.h"
24 #include "libANGLE/angletypes.h"
25 #include "libANGLE/renderer/serial_utils.h"
26 #include "libANGLE/renderer/vulkan/SecondaryCommandBuffer.h"
27 #include "libANGLE/renderer/vulkan/VulkanSecondaryCommandBuffer.h"
28 #include "libANGLE/renderer/vulkan/vk_wrapper.h"
29 #include "vulkan/vulkan_fuchsia_ext.h"
30 
// X-macro enumerating every GL front-end object type that has a Vulkan
// backend class named <Type>Vk.  Expanded below to forward-declare both the
// gl:: classes and the rx::*Vk classes, and to generate the ImplTypeHelper
// mapping between them.
#define ANGLE_GL_OBJECTS_X(PROC) \
    PROC(Buffer)                 \
    PROC(Context)                \
    PROC(Framebuffer)            \
    PROC(MemoryObject)           \
    PROC(Overlay)                \
    PROC(Program)                \
    PROC(ProgramPipeline)        \
    PROC(Query)                  \
    PROC(Renderbuffer)           \
    PROC(Sampler)                \
    PROC(Semaphore)              \
    PROC(Texture)                \
    PROC(TransformFeedback)      \
    PROC(VertexArray)

// Forward-declares a GL front-end class, e.g. "class Buffer;".
#define ANGLE_PRE_DECLARE_OBJECT(OBJ) class OBJ;
48 
// Forward declarations of EGL front-end types used by this header.
namespace egl
{
class Display;
class Image;
class ShareGroup;
}  // namespace egl

// Forward declarations of GL front-end types used by this header.
namespace gl
{
class MockOverlay;
struct RasterizerState;
struct SwizzleState;
struct VertexAttribute;
class VertexBinding;

// Forward-declare every object type listed in ANGLE_GL_OBJECTS_X.
ANGLE_GL_OBJECTS_X(ANGLE_PRE_DECLARE_OBJECT)
}  // namespace gl
66 
// Forward-declares the Vulkan backend class for a GL object, e.g. "class BufferVk;".
#define ANGLE_PRE_DECLARE_VK_OBJECT(OBJ) class OBJ##Vk;

// Forward declarations of Vulkan backend types used by this header.
namespace rx
{
class DisplayVk;
class ImageVk;
class ProgramExecutableVk;
class RenderbufferVk;
class RenderTargetVk;
class RendererVk;
class RenderPassCache;
class ShareGroupVk;
}  // namespace rx

namespace angle
{
// Converts an internal angle::Result into an egl::Error, using |errorCode|
// for the failing case.  (Exact reporting behavior lives in the definition.)
egl::Error ToEGL(Result result, rx::DisplayVk *displayVk, EGLint errorCode);
}  // namespace angle
85 
86 namespace rx
87 {
// Forward-declare every *Vk backend class listed in ANGLE_GL_OBJECTS_X.
ANGLE_GL_OBJECTS_X(ANGLE_PRE_DECLARE_VK_OBJECT)

// Returns a human-readable string for a VkResult (for logging/error reporting).
const char *VulkanResultString(VkResult result);

// Fixed upper bound on the number of Vulkan layers we ever enable.
constexpr size_t kMaxVulkanLayers = 20;
using VulkanLayerVector           = angle::FixedVector<const char *, kMaxVulkanLayers>;

// Verify that validation layers are available.
bool GetAvailableValidationLayers(const std::vector<VkLayerProperties> &layerProps,
                                  bool mustHaveLayers,
                                  VulkanLayerVector *enabledLayerNames);
99 
// Texture dimensionality categories used by the Vulkan backend.
enum class TextureDimension
{
    TEX_2D,
    TEX_CUBE,
    TEX_3D,
    TEX_2D_ARRAY,
};

// A maximum offset of 4096 covers almost every Vulkan driver on desktop (80%) and mobile (99%). The
// next highest values to meet native drivers are 16 bits or 32 bits.
// (15 bits limits vertex attribute offsets to 32767.)
constexpr uint32_t kAttributeOffsetMaxBits = 15;
// Sentinel for an invalid / not-found Vulkan memory type index.
constexpr uint32_t kInvalidMemoryTypeIndex = UINT32_MAX;
112 
113 namespace vk
114 {
// A packed attachment index interface with vulkan API.  Wraps a renderer-side
// (packed) attachment index in a distinct type so it cannot be confused with a
// GL-side attachment index.
class PackedAttachmentIndex final
{
  public:
    explicit constexpr PackedAttachmentIndex(uint32_t index) : mAttachmentIndex(index) {}
    constexpr PackedAttachmentIndex(const PackedAttachmentIndex &other) = default;
    constexpr PackedAttachmentIndex &operator=(const PackedAttachmentIndex &other) = default;

    constexpr uint32_t get() const { return mAttachmentIndex; }
    // constexpr (C++14 relaxed rules) for consistency with the rest of this
    // otherwise fully-constexpr API.
    constexpr PackedAttachmentIndex &operator++()
    {
        ++mAttachmentIndex;
        return *this;
    }
    constexpr bool operator==(const PackedAttachmentIndex &other) const
    {
        return mAttachmentIndex == other.mAttachmentIndex;
    }
    constexpr bool operator!=(const PackedAttachmentIndex &other) const
    {
        return mAttachmentIndex != other.mAttachmentIndex;
    }
    constexpr bool operator<(const PackedAttachmentIndex &other) const
    {
        return mAttachmentIndex < other.mAttachmentIndex;
    }

  private:
    uint32_t mAttachmentIndex;
};
// An attachment count shares the representation of an index.
using PackedAttachmentCount                                    = PackedAttachmentIndex;
// -1 wraps to UINT32_MAX: an index no real attachment can have.
static constexpr PackedAttachmentIndex kAttachmentIndexInvalid = PackedAttachmentIndex(-1);
static constexpr PackedAttachmentIndex kAttachmentIndexZero    = PackedAttachmentIndex(0);
148 
149 // Prepend ptr to the pNext chain at chainStart
150 template <typename VulkanStruct1, typename VulkanStruct2>
AddToPNextChain(VulkanStruct1 * chainStart,VulkanStruct2 * ptr)151 void AddToPNextChain(VulkanStruct1 *chainStart, VulkanStruct2 *ptr)
152 {
153     ASSERT(ptr->pNext == nullptr);
154 
155     VkBaseOutStructure *localPtr = reinterpret_cast<VkBaseOutStructure *>(chainStart);
156     ptr->pNext                   = localPtr->pNext;
157     localPtr->pNext              = reinterpret_cast<VkBaseOutStructure *>(ptr);
158 }
159 
160 // Append ptr to the end of the chain
161 template <typename VulkanStruct1, typename VulkanStruct2>
AppendToPNextChain(VulkanStruct1 * chainStart,VulkanStruct2 * ptr)162 void AppendToPNextChain(VulkanStruct1 *chainStart, VulkanStruct2 *ptr)
163 {
164     if (!ptr)
165     {
166         return;
167     }
168 
169     VkBaseOutStructure *endPtr = reinterpret_cast<VkBaseOutStructure *>(chainStart);
170     while (endPtr->pNext)
171     {
172         endPtr = endPtr->pNext;
173     }
174     endPtr->pNext = reinterpret_cast<VkBaseOutStructure *>(ptr);
175 }
176 
// Captures a failing VkResult together with the source location that raised
// it, for reporting through Context::handleError.
struct Error
{
    VkResult errorCode;    // the failing Vulkan result code
    const char *file;      // source file where the error was recorded
    const char *function;  // function where the error was recorded
    uint32_t line;         // line where the error was recorded
};
184 
// Abstracts error handling. Implemented by both ContextVk for GL and DisplayVk for EGL errors.
class Context : angle::NonCopyable
{
  public:
    Context(RendererVk *renderer);
    virtual ~Context();

    // Reports a failing VkResult along with the source location that raised it.
    virtual void handleError(VkResult result,
                             const char *file,
                             const char *function,
                             unsigned int line) = 0;
    VkDevice getDevice() const;
    RendererVk *getRenderer() const { return mRenderer; }

    // Per-context Vulkan performance counters (const and mutable access).
    const angle::VulkanPerfCounters &getPerfCounters() const { return mPerfCounters; }
    angle::VulkanPerfCounters &getPerfCounters() { return mPerfCounters; }

  protected:
    RendererVk *const mRenderer;
    angle::VulkanPerfCounters mPerfCounters;
};
206 
class RenderPassDesc;

// The secondary command buffer implementation is selected at compile time:
// either ANGLE's own recorder (priv::SecondaryCommandBuffer) or a wrapper over
// driver-level Vulkan secondary command buffers, independently for command
// buffers recorded outside and inside a render pass.
#if ANGLE_USE_CUSTOM_VULKAN_OUTSIDE_RENDER_PASS_CMD_BUFFERS
using OutsideRenderPassCommandBuffer = priv::SecondaryCommandBuffer;
#else
using OutsideRenderPassCommandBuffer         = VulkanSecondaryCommandBuffer;
#endif
#if ANGLE_USE_CUSTOM_VULKAN_RENDER_PASS_CMD_BUFFERS
using RenderPassCommandBuffer = priv::SecondaryCommandBuffer;
#else
using RenderPassCommandBuffer                = VulkanSecondaryCommandBuffer;
#endif
219 
// Holds both flavors of secondary command buffers (recorded outside vs.
// inside a render pass).
struct SecondaryCommandBufferList
{
    std::vector<OutsideRenderPassCommandBuffer> outsideRenderPassCommandBuffers;
    std::vector<RenderPassCommandBuffer> renderPassCommandBuffers;
};

// Command pools used to allocate the two flavors of secondary command buffers.
struct SecondaryCommandPools
{
    CommandPool outsideRenderPassPool;
    CommandPool renderPassPool;
};

// Derive the VkImageAspectFlags implied by an ANGLE format.
VkImageAspectFlags GetDepthStencilAspectFlags(const angle::Format &format);
VkImageAspectFlags GetFormatAspectFlags(const angle::Format &format);
234 
// Maps a front-end type to its backend implementation type, e.g.
// gl::Buffer -> BufferVk.  Specializations are generated below.
template <typename T>
struct ImplTypeHelper;

// clang-format off
#define ANGLE_IMPL_TYPE_HELPER_GL(OBJ) \
template<>                             \
struct ImplTypeHelper<gl::OBJ>         \
{                                      \
    using ImplType = OBJ##Vk;          \
};
// clang-format on

// Generate ImplTypeHelper specializations for every GL object type.
ANGLE_GL_OBJECTS_X(ANGLE_IMPL_TYPE_HELPER_GL)

// Hand-written mappings for types not covered by ANGLE_GL_OBJECTS_X.
template <>
struct ImplTypeHelper<gl::MockOverlay>
{
    using ImplType = OverlayVk;
};

template <>
struct ImplTypeHelper<egl::Display>
{
    using ImplType = DisplayVk;
};

template <>
struct ImplTypeHelper<egl::Image>
{
    using ImplType = ImageVk;
};

template <>
struct ImplTypeHelper<egl::ShareGroup>
{
    using ImplType = ShareGroupVk;
};

template <typename T>
using GetImplType = typename ImplTypeHelper<T>::ImplType;

// Returns the backend implementation object for a front-end object.
template <typename T>
GetImplType<T> *GetImpl(const T *glObject)
{
    return GetImplAs<GetImplType<T>>(glObject);
}

// Null-tolerant variant of GetImpl.
template <typename T>
GetImplType<T> *SafeGetImpl(const T *glObject)
{
    return SafeGetImplAs<GetImplType<T>>(glObject);
}

// The mock overlay has no backend object; always returns nullptr.
template <>
inline OverlayVk *GetImpl(const gl::MockOverlay *glObject)
{
    return nullptr;
}
293 
// Pairs an object with a Serial.  Callers use the serial to decide when the
// object is safe to destroy.  ObjT must provide valid() and destroy(VkDevice).
template <typename ObjT>
class ObjectAndSerial final : angle::NonCopyable
{
  public:
    ObjectAndSerial() {}

    ObjectAndSerial(ObjT &&object, Serial serial) : mObject(std::move(object)), mSerial(serial) {}

    ObjectAndSerial(ObjectAndSerial &&other)
        : mObject(std::move(other.mObject)), mSerial(std::move(other.mSerial))
    {}
    ObjectAndSerial &operator=(ObjectAndSerial &&other)
    {
        mObject = std::move(other.mObject);
        mSerial = std::move(other.mSerial);
        return *this;
    }

    Serial getSerial() const { return mSerial; }
    void updateSerial(Serial newSerial) { mSerial = newSerial; }

    const ObjT &get() const { return mObject; }
    ObjT &get() { return mObject; }

    bool valid() const { return mObject.valid(); }

    // Destroys the wrapped object and resets the serial to the default.
    void destroy(VkDevice device)
    {
        mObject.destroy(device);
        mSerial = Serial();
    }

  private:
    ObjT mObject;
    Serial mSerial;
};
330 
// Reference to a deleted object. The object is due to be destroyed at some point in the future.
// |mHandleType| determines the type of the object and which destroy function should be called.
class GarbageObject
{
  public:
    GarbageObject();
    GarbageObject(GarbageObject &&other);
    GarbageObject &operator=(GarbageObject &&rhs);

    bool valid() const { return mHandle != VK_NULL_HANDLE; }
    void destroy(RendererVk *renderer);

    // Takes ownership of |object|'s handle (via release()) and records the
    // wrapper's handle type so destroy() can call the right function later.
    template <typename DerivedT, typename HandleT>
    static GarbageObject Get(WrappedObject<DerivedT, HandleT> *object)
    {
        // Using c-style cast here to avoid conditional compile for MSVC 32-bit
        //  which fails to compile with reinterpret_cast, requiring static_cast.
        return GarbageObject(HandleTypeHelper<DerivedT>::kHandleType,
                             (GarbageHandle)(object->release()));
    }

  private:
    VK_DEFINE_NON_DISPATCHABLE_HANDLE(GarbageHandle)
    GarbageObject(HandleType handleType, GarbageHandle handle);

    HandleType mHandleType;
    GarbageHandle mHandle;
};
359 
// Convenience helper: converts any wrapped Vulkan object into a GarbageObject.
template <typename T>
GarbageObject GetGarbage(T *obj)
{
    return GarbageObject::Get(obj);
}

// A list of garbage objects. Has no object lifetime information.
using GarbageList = std::vector<GarbageObject>;

// A list of garbage objects and the associated serial after which the objects can be destroyed.
using GarbageAndSerial = ObjectAndSerial<GarbageList>;

// Houses multiple lists of garbage objects. Each sub-list has a different lifetime. They should be
// sorted such that later-living garbage is ordered later in the list.
using GarbageQueue = std::queue<GarbageAndSerial>;
375 
// Caches VkPhysicalDeviceMemoryProperties and answers queries against it.
class MemoryProperties final : angle::NonCopyable
{
  public:
    MemoryProperties();

    // Populates the cached properties from |physicalDevice|.
    void init(VkPhysicalDevice physicalDevice);
    bool hasLazilyAllocatedMemory() const;
    // Finds a memory type index compatible with |memoryRequirements| and
    // |requestedMemoryPropertyFlags|; on success returns the chosen index and
    // the property flags the type actually has.
    angle::Result findCompatibleMemoryIndex(Context *context,
                                            const VkMemoryRequirements &memoryRequirements,
                                            VkMemoryPropertyFlags requestedMemoryPropertyFlags,
                                            bool isExternalMemory,
                                            VkMemoryPropertyFlags *memoryPropertyFlagsOut,
                                            uint32_t *indexOut) const;
    void destroy();

    // Size (in bytes) of the heap that backs the given memory type.
    VkDeviceSize getHeapSizeForMemoryType(uint32_t memoryType) const
    {
        uint32_t heapIndex = mMemoryProperties.memoryTypes[memoryType].heapIndex;
        return mMemoryProperties.memoryHeaps[heapIndex].size;
    }

    uint32_t getMemoryTypeCount() const { return mMemoryProperties.memoryTypeCount; }

  private:
    VkPhysicalDeviceMemoryProperties mMemoryProperties;
};
402 
// Similar to StagingImage, for Buffers.  Owns a buffer plus its allocation.
class StagingBuffer final : angle::NonCopyable
{
  public:
    StagingBuffer();
    // Hands the buffer over for deferred cleanup through |contextVk|.
    void release(ContextVk *contextVk);
    // Queues the buffer as garbage to be freed after |serial| completes.
    void collectGarbage(RendererVk *renderer, Serial serial);
    // Destroys the buffer immediately.
    void destroy(RendererVk *renderer);

    angle::Result init(Context *context, VkDeviceSize size, StagingUsage usage);

    Buffer &getBuffer() { return mBuffer; }
    const Buffer &getBuffer() const { return mBuffer; }
    size_t getSize() const { return mSize; }

  private:
    Buffer mBuffer;
    Allocation mAllocation;
    size_t mSize;
};
423 
// --- Memory allocation helpers --------------------------------------------
// The *Out parameters receive the resulting memory object and, where present,
// the property flags of the chosen memory type or the allocated size.

angle::Result InitMappableAllocation(Context *context,
                                     const Allocator &allocator,
                                     Allocation *allocation,
                                     VkDeviceSize size,
                                     int value,
                                     VkMemoryPropertyFlags memoryPropertyFlags);

angle::Result InitMappableDeviceMemory(Context *context,
                                       DeviceMemory *deviceMemory,
                                       VkDeviceSize size,
                                       int value,
                                       VkMemoryPropertyFlags memoryPropertyFlags);

angle::Result AllocateBufferMemory(Context *context,
                                   VkMemoryPropertyFlags requestedMemoryPropertyFlags,
                                   VkMemoryPropertyFlags *memoryPropertyFlagsOut,
                                   const void *extraAllocationInfo,
                                   Buffer *buffer,
                                   DeviceMemory *deviceMemoryOut,
                                   VkDeviceSize *sizeOut);

angle::Result AllocateImageMemory(Context *context,
                                  VkMemoryPropertyFlags memoryPropertyFlags,
                                  VkMemoryPropertyFlags *memoryPropertyFlagsOut,
                                  const void *extraAllocationInfo,
                                  Image *image,
                                  DeviceMemory *deviceMemoryOut,
                                  VkDeviceSize *sizeOut);

// Variants that take explicit, caller-supplied VkMemoryRequirements.
angle::Result AllocateImageMemoryWithRequirements(
    Context *context,
    VkMemoryPropertyFlags memoryPropertyFlags,
    const VkMemoryRequirements &memoryRequirements,
    const void *extraAllocationInfo,
    const VkBindImagePlaneMemoryInfoKHR *extraBindInfo,
    Image *image,
    DeviceMemory *deviceMemoryOut);

angle::Result AllocateBufferMemoryWithRequirements(Context *context,
                                                   VkMemoryPropertyFlags memoryPropertyFlags,
                                                   const VkMemoryRequirements &memoryRequirements,
                                                   const void *extraAllocationInfo,
                                                   Buffer *buffer,
                                                   VkMemoryPropertyFlags *memoryPropertyFlagsOut,
                                                   DeviceMemory *deviceMemoryOut);

// A compiled shader module paired with a serial (see ObjectAndSerial).
using ShaderAndSerial = ObjectAndSerial<ShaderModule>;

angle::Result InitShaderAndSerial(Context *context,
                                  ShaderAndSerial *shaderAndSerial,
                                  const uint32_t *shaderCode,
                                  size_t shaderCodeSize);

// Selects the 2D texture type appropriate for |layerCount| and |samples|.
gl::TextureType Get2DTextureType(uint32_t layerCount, GLint samples);
478 
// Command recording disposition: start a fresh recording or append to the
// current one.  (Name-based; confirm exact semantics at call sites.)
enum class RecordingMode
{
    Start,
    Append,
};
484 
// Helper class to handle RAII patterns for initialization. Requires that T have a destroy method
// that takes a VkDevice and returns void.
template <typename T>
class ANGLE_NO_DISCARD DeviceScoped final : angle::NonCopyable
{
  public:
    DeviceScoped(VkDevice device) : mDevice(device) {}
    ~DeviceScoped() { mVar.destroy(mDevice); }

    const T &get() const { return mVar; }
    T &get() { return mVar; }

    // Moves the object out.  The destructor still runs destroy() on the
    // moved-from T afterwards.
    T &&release() { return std::move(mVar); }

  private:
    VkDevice mDevice;
    T mVar;
};
503 
// Same pattern as DeviceScoped, but for types whose destroy takes an Allocator.
template <typename T>
class ANGLE_NO_DISCARD AllocatorScoped final : angle::NonCopyable
{
  public:
    AllocatorScoped(const Allocator &allocator) : mAllocator(allocator) {}
    ~AllocatorScoped() { mVar.destroy(mAllocator); }

    const T &get() const { return mVar; }
    T &get() { return mVar; }

    // Moves the object out.  The destructor still runs destroy() on the
    // moved-from T afterwards.
    T &&release() { return std::move(mVar); }

  private:
    const Allocator &mAllocator;
    T mVar;
};
520 
// Similar to DeviceScoped, but releases objects instead of destroying them. Requires that T have a
// release method that takes a ContextVk * and returns void.
template <typename T>
class ANGLE_NO_DISCARD ContextScoped final : angle::NonCopyable
{
  public:
    ContextScoped(ContextVk *contextVk) : mContextVk(contextVk) {}
    ~ContextScoped() { mVar.release(mContextVk); }

    const T &get() const { return mVar; }
    T &get() { return mVar; }

    // Moves the object out.  The destructor still runs release() on the
    // moved-from T afterwards.
    T &&release() { return std::move(mVar); }

  private:
    ContextVk *mContextVk;
    T mVar;
};
539 
// Same pattern as ContextScoped, but releases through a RendererVk.
template <typename T>
class ANGLE_NO_DISCARD RendererScoped final : angle::NonCopyable
{
  public:
    RendererScoped(RendererVk *renderer) : mRenderer(renderer) {}
    ~RendererScoped() { mVar.release(mRenderer); }

    const T &get() const { return mVar; }
    T &get() { return mVar; }

    // Moves the object out.  The destructor still runs release() on the
    // moved-from T afterwards.
    T &&release() { return std::move(mVar); }

  private:
    RendererVk *mRenderer;
    T mVar;
};
556 
557 // This is a very simple RefCount class that has no autoreleasing. Used in the descriptor set and
558 // pipeline layout caches.
559 template <typename T>
560 class RefCounted : angle::NonCopyable
561 {
562   public:
563     RefCounted() : mRefCount(0) {}
564     explicit RefCounted(T &&newObject) : mRefCount(0), mObject(std::move(newObject)) {}
565     ~RefCounted() { ASSERT(mRefCount == 0 && !mObject.valid()); }
566 
567     RefCounted(RefCounted &&copy) : mRefCount(copy.mRefCount), mObject(std::move(copy.mObject))
568     {
569         ASSERT(this != &copy);
570         copy.mRefCount = 0;
571     }
572 
573     RefCounted &operator=(RefCounted &&rhs)
574     {
575         std::swap(mRefCount, rhs.mRefCount);
576         mObject = std::move(rhs.mObject);
577         return *this;
578     }
579 
580     void addRef()
581     {
582         ASSERT(mRefCount != std::numeric_limits<uint32_t>::max());
583         mRefCount++;
584     }
585 
586     void releaseRef()
587     {
588         ASSERT(isReferenced());
589         mRefCount--;
590     }
591 
592     bool isReferenced() const { return mRefCount != 0; }
593 
594     T &get() { return mObject; }
595     const T &get() const { return mObject; }
596 
597     // A debug function to validate that the reference count is as expected used for assertions.
598     bool isRefCountAsExpected(uint32_t expectedRefCount) { return mRefCount == expectedRefCount; }
599 
600   private:
601     uint32_t mRefCount;
602     T mObject;
603 };
604 
605 template <typename T>
606 class BindingPointer final : angle::NonCopyable
607 {
608   public:
609     BindingPointer() = default;
610     ~BindingPointer() { reset(); }
611 
612     BindingPointer(BindingPointer &&other)
613     {
614         set(other.mRefCounted);
615         other.reset();
616     }
617 
618     void set(RefCounted<T> *refCounted)
619     {
620         if (mRefCounted)
621         {
622             mRefCounted->releaseRef();
623         }
624 
625         mRefCounted = refCounted;
626 
627         if (mRefCounted)
628         {
629             mRefCounted->addRef();
630         }
631     }
632 
633     void reset() { set(nullptr); }
634 
635     T &get() { return mRefCounted->get(); }
636     const T &get() const { return mRefCounted->get(); }
637 
638     bool valid() const { return mRefCounted != nullptr; }
639 
640   private:
641     RefCounted<T> *mRefCounted = nullptr;
642 };
643 
// Helper class to share ref-counted Vulkan objects.  Requires that T have a destroy method
// that takes a VkDevice and returns void.  Unlike BindingPointer, this owns the
// RefCounted node: dropping the last reference destroys (or recycles) the
// object and deletes the node.
template <typename T>
class Shared final : angle::NonCopyable
{
  public:
    Shared() : mRefCounted(nullptr) {}
    // Must be reset/released before destruction.
    ~Shared() { ASSERT(mRefCounted == nullptr); }

    Shared(Shared &&other) { *this = std::move(other); }
    Shared &operator=(Shared &&other)
    {
        ASSERT(this != &other);
        mRefCounted       = other.mRefCounted;
        other.mRefCounted = nullptr;
        return *this;
    }

    // Rebind to |refCounted|; if this held the last reference to the previous
    // object, destroy it and delete its node first.
    void set(VkDevice device, RefCounted<T> *refCounted)
    {
        if (mRefCounted)
        {
            mRefCounted->releaseRef();
            if (!mRefCounted->isReferenced())
            {
                mRefCounted->get().destroy(device);
                SafeDelete(mRefCounted);
            }
        }

        mRefCounted = refCounted;

        if (mRefCounted)
        {
            mRefCounted->addRef();
        }
    }

    // Like set(), but only valid when currently empty (nothing to release).
    void setUnreferenced(RefCounted<T> *refCounted)
    {
        ASSERT(!mRefCounted);
        ASSERT(refCounted);

        mRefCounted = refCounted;
        mRefCounted->addRef();
    }

    // Takes ownership of |newObject| in a freshly allocated RefCounted node.
    void assign(VkDevice device, T &&newObject)
    {
        set(device, new RefCounted<T>(std::move(newObject)));
    }

    void copy(VkDevice device, const Shared<T> &other) { set(device, other.mRefCounted); }

    void copyUnreferenced(const Shared<T> &other) { setUnreferenced(other.mRefCounted); }

    void reset(VkDevice device) { set(device, nullptr); }

    // Drops the reference; if it was the last one, hands the object to
    // |recycler| instead of destroying it.
    template <typename RecyclerT>
    void resetAndRecycle(RecyclerT *recycler)
    {
        if (mRefCounted)
        {
            mRefCounted->releaseRef();
            if (!mRefCounted->isReferenced())
            {
                ASSERT(mRefCounted->get().valid());
                recycler->recycle(std::move(mRefCounted->get()));
                SafeDelete(mRefCounted);
            }

            mRefCounted = nullptr;
        }
    }

    // Drops the reference; if it was the last one, passes the object to the
    // |onRelease| callable instead of destroying it.
    template <typename OnRelease>
    void resetAndRelease(OnRelease *onRelease)
    {
        if (mRefCounted)
        {
            mRefCounted->releaseRef();
            if (!mRefCounted->isReferenced())
            {
                ASSERT(mRefCounted->get().valid());
                (*onRelease)(std::move(mRefCounted->get()));
                SafeDelete(mRefCounted);
            }

            mRefCounted = nullptr;
        }
    }

    bool isReferenced() const
    {
        // If reference is zero, the object should have been deleted.  I.e. if the object is not
        // nullptr, it should have a reference.
        ASSERT(!mRefCounted || mRefCounted->isReferenced());
        return mRefCounted != nullptr;
    }

    T &get()
    {
        ASSERT(mRefCounted && mRefCounted->isReferenced());
        return mRefCounted->get();
    }
    const T &get() const
    {
        ASSERT(mRefCounted && mRefCounted->isReferenced());
        return mRefCounted->get();
    }

  private:
    RefCounted<T> *mRefCounted;
};
758 
759 template <typename T>
760 class Recycler final : angle::NonCopyable
761 {
762   public:
763     Recycler() = default;
764 
765     void recycle(T &&garbageObject) { mObjectFreeList.emplace_back(std::move(garbageObject)); }
766 
767     void fetch(T *outObject)
768     {
769         ASSERT(!empty());
770         *outObject = std::move(mObjectFreeList.back());
771         mObjectFreeList.pop_back();
772     }
773 
774     void destroy(VkDevice device)
775     {
776         for (T &object : mObjectFreeList)
777         {
778             object.destroy(device);
779         }
780         mObjectFreeList.clear();
781     }
782 
783     bool empty() const { return mObjectFreeList.empty(); }
784 
785   private:
786     std::vector<T> mObjectFreeList;
787 };
788 
ANGLE_ENABLE_STRUCT_PADDING_WARNINGS
// Values supplied to shaders as Vulkan specialization constants.  Padding
// warnings are enabled so no implicit padding sneaks into the layout.
struct SpecializationConstants final
{
    VkBool32 lineRasterEmulation;
    uint32_t surfaceRotation;
    float drawableWidth;
    float drawableHeight;
    uint32_t dither;
};
ANGLE_DISABLE_STRUCT_PADDING_WARNINGS

// Maps each specialization constant ID to a value of type T.
template <typename T>
using SpecializationConstantMap = angle::PackedEnumMap<sh::vk::SpecializationConstantId, T>;

using ShaderAndSerialPointer = BindingPointer<ShaderAndSerial>;
using ShaderAndSerialMap     = gl::ShaderMap<ShaderAndSerialPointer>;

// Fills in |label| for use with VK_EXT_debug_utils markers.
void MakeDebugUtilsLabel(GLenum source, const char *marker, VkDebugUtilsLabelEXT *label);

// Slots in ClearValuesArray reserved for depth and stencil, placed after the
// per-color-attachment slots.
constexpr size_t kUnpackedDepthIndex   = gl::IMPLEMENTATION_MAX_DRAW_BUFFERS;
constexpr size_t kUnpackedStencilIndex = gl::IMPLEMENTATION_MAX_DRAW_BUFFERS + 1;
810 
// Fixed-size array of clear values: one slot per color attachment, plus the
// kUnpackedDepthIndex/kUnpackedStencilIndex slots.  mEnabled tracks which
// slots currently hold a value.
class ClearValuesArray final
{
  public:
    ClearValuesArray();
    ~ClearValuesArray();

    ClearValuesArray(const ClearValuesArray &other);
    ClearValuesArray &operator=(const ClearValuesArray &rhs);

    // Stores |clearValue| at |index|; aspect handling lives in the definition.
    void store(uint32_t index, VkImageAspectFlags aspectFlags, const VkClearValue &clearValue);
    void storeNoDepthStencil(uint32_t index, const VkClearValue &clearValue);

    // Clears slot |index| and marks it disabled.
    void reset(size_t index)
    {
        mValues[index] = {};
        mEnabled.reset(index);
    }

    bool test(size_t index) const { return mEnabled.test(index); }
    bool testDepth() const { return mEnabled.test(kUnpackedDepthIndex); }
    bool testStencil() const { return mEnabled.test(kUnpackedStencilIndex); }
    gl::DrawBufferMask getColorMask() const;

    const VkClearValue &operator[](size_t index) const { return mValues[index]; }

    float getDepthValue() const { return mValues[kUnpackedDepthIndex].depthStencil.depth; }
    uint32_t getStencilValue() const { return mValues[kUnpackedStencilIndex].depthStencil.stencil; }

    const VkClearValue *data() const { return mValues.data(); }
    bool empty() const { return mEnabled.none(); }
    bool any() const { return mEnabled.any(); }

  private:
    gl::AttachmentArray<VkClearValue> mValues;
    gl::AttachmentsMask mEnabled;
};
847 
// Defines Serials for Vulkan objects.
#define ANGLE_VK_SERIAL_OP(X) \
    X(Buffer)                 \
    X(Image)                  \
    X(ImageOrBufferView)      \
    X(Sampler)

// Generates a strongly-typed serial class per object type.  Zero is the
// invalid serial; comparisons ASSERT that at least one operand is valid.
#define ANGLE_DEFINE_VK_SERIAL_TYPE(Type)                                     \
    class Type##Serial                                                        \
    {                                                                         \
      public:                                                                 \
        constexpr Type##Serial() : mSerial(kInvalid) {}                       \
        constexpr explicit Type##Serial(uint32_t serial) : mSerial(serial) {} \
                                                                              \
        constexpr bool operator==(const Type##Serial &other) const            \
        {                                                                     \
            ASSERT(mSerial != kInvalid || other.mSerial != kInvalid);         \
            return mSerial == other.mSerial;                                  \
        }                                                                     \
        constexpr bool operator!=(const Type##Serial &other) const            \
        {                                                                     \
            ASSERT(mSerial != kInvalid || other.mSerial != kInvalid);         \
            return mSerial != other.mSerial;                                  \
        }                                                                     \
        constexpr uint32_t getValue() const { return mSerial; }               \
        constexpr bool valid() const { return mSerial != kInvalid; }          \
                                                                              \
      private:                                                                \
        uint32_t mSerial;                                                     \
        static constexpr uint32_t kInvalid = 0;                               \
    };                                                                        \
    static constexpr Type##Serial kInvalid##Type##Serial = Type##Serial();

ANGLE_VK_SERIAL_OP(ANGLE_DEFINE_VK_SERIAL_TYPE)

// Declares e.g. "BufferSerial generateBufferSerial();".
#define ANGLE_DECLARE_GEN_VK_SERIAL(Type) Type##Serial generate##Type##Serial();

// Issues unique serials for Vulkan resources from a single shared counter.
class ResourceSerialFactory final : angle::NonCopyable
{
  public:
    ResourceSerialFactory();
    ~ResourceSerialFactory();

    ANGLE_VK_SERIAL_OP(ANGLE_DECLARE_GEN_VK_SERIAL)

  private:
    // Backing counter shared by all serial types.
    uint32_t issueSerial();

    // Kept atomic so it can be accessed from multiple Context threads at once.
    std::atomic<uint32_t> mCurrentUniqueSerial;
};
899 
// BufferBlock: a Vulkan buffer plus its backing device memory, optionally
// managed by a VMA virtual block for suballocation.
class BufferBlock final : angle::NonCopyable
{
  public:
    BufferBlock();
    BufferBlock(BufferBlock &&other);
    ~BufferBlock();

    void destroy(RendererVk *renderer);
    // Initializes with a virtual block so allocate()/free() can suballocate.
    angle::Result init(Context *context,
                       Buffer &buffer,
                       vma::VirtualBlockCreateFlags flags,
                       DeviceMemory &deviceMemory,
                       VkMemoryPropertyFlags memoryPropertyFlags,
                       VkDeviceSize size);
    // Initializes as a plain buffer with no suballocation support.
    void initWithoutVirtualBlock(Context *context,
                                 Buffer &buffer,
                                 DeviceMemory &deviceMemory,
                                 VkMemoryPropertyFlags memoryPropertyFlags,
                                 VkDeviceSize size);

    BufferBlock &operator=(BufferBlock &&other);

    const Buffer &getBuffer() const { return mBuffer; }
    const DeviceMemory &getDeviceMemory() const { return mDeviceMemory; }
    DeviceMemory &getDeviceMemory() { return mDeviceMemory; }
    BufferSerial getBufferSerial() const { return mSerial; }

    VkMemoryPropertyFlags getMemoryPropertyFlags() const;
    VkDeviceSize getMemorySize() const;

    // Suballocation within the block (requires hasVirtualBlock()).
    VkResult allocate(VkDeviceSize size, VkDeviceSize alignment, VkDeviceSize *offsetOut);
    void free(VkDeviceSize offset);
    VkBool32 isEmpty();

    bool hasVirtualBlock() const { return mVirtualBlock.valid(); }
    bool isHostVisible() const;
    bool isCoherent() const;
    bool isMapped() const;
    VkResult map(const VkDevice device);
    void unmap(const VkDevice device);
    uint8_t *getMappedMemory() const;

    // This should be called whenever this found to be empty. The total number of count of empty is
    // returned.
    int32_t getAndIncrementEmptyCounter();

  private:
    // Guards the (externally synchronized) VMA virtual block.
    mutable std::mutex mVirtualBlockMutex;
    VirtualBlock mVirtualBlock;

    Buffer mBuffer;
    DeviceMemory mDeviceMemory;
    VkMemoryPropertyFlags mMemoryPropertyFlags;
    VkDeviceSize mSize;
    uint8_t *mMappedMemory;
    BufferSerial mSerial;
    // Heuristic information for pruneEmptyBuffer. This tracks how many times (consecutively) this
    // buffer block is found to be empty when pruneEmptyBuffer is called. This gets reset whenever
    // it becomes non-empty.
    int32_t mCountRemainsEmpty;
};
using BufferBlockPointerVector = std::vector<std::unique_ptr<BufferBlock>>;
963 
964 // BufferSuballocation
965 class BufferSuballocation final : angle::NonCopyable
966 {
967   public:
968     BufferSuballocation();
969 
970     BufferSuballocation(BufferSuballocation &&other);
971     BufferSuballocation &operator=(BufferSuballocation &&other);
972 
973     void destroy(RendererVk *renderer);
974 
975     void init(VkDevice device, BufferBlock *block, VkDeviceSize offset, VkDeviceSize size);
976     void initWithEntireBuffer(Context *context,
977                               Buffer &buffer,
978                               DeviceMemory &deviceMemory,
979                               VkMemoryPropertyFlags memoryPropertyFlags,
980                               VkDeviceSize size);
981 
982     const Buffer &getBuffer() const;
983     VkDeviceSize getSize() const;
984     const DeviceMemory &getDeviceMemory() const;
985     VkMemoryMapFlags getMemoryPropertyFlags() const;
986     bool isHostVisible() const;
987     bool isCoherent() const;
988     bool isMapped() const;
989     uint8_t *getMappedMemory() const;
990     void flush(const VkDevice &device);
991     void invalidate(const VkDevice &device);
992     VkDeviceSize getOffset() const;
993     bool valid() const;
994     VkResult map(Context *context);
995     BufferSerial getBlockSerial() const;
996     uint8_t *getBlockMemory() const;
997     VkDeviceSize getBlockMemorySize() const;
998     bool isSuballocated() const { return mBufferBlock->hasVirtualBlock(); }
999 
1000   private:
1001     // Only used by DynamicBuffer where DynamicBuffer does the actual suballocation and pass the
1002     // offset/size to this object. Since DynamicBuffer does not have a VMA virtual allocator, they
1003     // will be ignored at destroy time. The offset/size is set here mainly for easy retrieval when
1004     // the BufferHelper object is passed around.
1005     friend class BufferHelper;
1006     void setOffsetAndSize(VkDeviceSize offset, VkDeviceSize size);
1007 
1008     BufferBlock *mBufferBlock;
1009     VkDeviceSize mOffset;
1010     VkDeviceSize mSize;
1011 };
1012 
// BufferBlock implementation.

// Returns the Vulkan memory property flags the block's memory was created with.
ANGLE_INLINE VkMemoryPropertyFlags BufferBlock::getMemoryPropertyFlags() const
{
    return mMemoryPropertyFlags;
}

// Returns the total size of the block's memory.
ANGLE_INLINE VkDeviceSize BufferBlock::getMemorySize() const
{
    return mSize;
}
1023 
1024 ANGLE_INLINE VkBool32 BufferBlock::isEmpty()
1025 {
1026     std::unique_lock<std::mutex> lock(mVirtualBlockMutex);
1027     return vma::IsVirtualBlockEmpty(mVirtualBlock.getHandle());
1028 }
1029 
// True when the block's memory is host-visible (mappable by the CPU).
ANGLE_INLINE bool BufferBlock::isHostVisible() const
{
    return (mMemoryPropertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0;
}

// True when the block's memory is host-coherent (no explicit flush/invalidate
// is needed for host access).
ANGLE_INLINE bool BufferBlock::isCoherent() const
{
    return (mMemoryPropertyFlags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0;
}

// True when the block currently has a mapped CPU pointer.
ANGLE_INLINE bool BufferBlock::isMapped() const
{
    return mMappedMemory != nullptr;
}

// Returns the mapped CPU pointer; the block must currently be mapped.
ANGLE_INLINE uint8_t *BufferBlock::getMappedMemory() const
{
    ASSERT(mMappedMemory != nullptr);
    return mMappedMemory;
}
1050 
1051 ANGLE_INLINE VkResult BufferBlock::allocate(VkDeviceSize size,
1052                                             VkDeviceSize alignment,
1053                                             VkDeviceSize *offsetOut)
1054 {
1055     std::unique_lock<std::mutex> lock(mVirtualBlockMutex);
1056     mCountRemainsEmpty = 0;
1057     return mVirtualBlock.allocate(size, alignment, offsetOut);
1058 }
1059 
// BufferSuballocation implementation.

// Default constructor: an invalid (detached) suballocation.
ANGLE_INLINE BufferSuballocation::BufferSuballocation()
    : mBufferBlock(nullptr), mOffset(0), mSize(0)
{}

// Move constructor: default-constructs, then delegates to move-assignment.
ANGLE_INLINE BufferSuballocation::BufferSuballocation(BufferSuballocation &&other)
    : BufferSuballocation()
{
    *this = std::move(other);
}

// Move assignment implemented as a member-wise swap; |other| receives this
// object's previous state.
ANGLE_INLINE BufferSuballocation &BufferSuballocation::operator=(BufferSuballocation &&other)
{
    std::swap(mBufferBlock, other.mBufferBlock);
    std::swap(mSize, other.mSize);
    std::swap(mOffset, other.mOffset);
    return *this;
}
1078 
// A suballocation is valid once init()/initWithEntireBuffer() has attached it
// to a BufferBlock, and until destroy() detaches it.
ANGLE_INLINE bool BufferSuballocation::valid() const
{
    return mBufferBlock != nullptr;
}
1083 
1084 ANGLE_INLINE void BufferSuballocation::destroy(RendererVk *renderer)
1085 {
1086     if (valid())
1087     {
1088         ASSERT(mBufferBlock);
1089         if (mBufferBlock->hasVirtualBlock())
1090         {
1091             mBufferBlock->free(mOffset);
1092             mBufferBlock = nullptr;
1093         }
1094         else
1095         {
1096             // When virtual block is invalid, this is the standalone buffer that are created by
1097             // BufferSuballocation::initWithEntireBuffer call. In this case, vmaBufferSuballocation
1098             // owns block, we must properly delete the block object.
1099             mBufferBlock->destroy(renderer);
1100             SafeDelete(mBufferBlock);
1101         }
1102         mOffset = 0;
1103         mSize   = 0;
1104     }
1105 }
1106 
// Attaches this suballocation to a range previously carved out of |block|.
// NOTE(review): |device| is unused in this implementation; the offset/size are
// recorded verbatim.
ANGLE_INLINE void BufferSuballocation::init(VkDevice device,
                                            BufferBlock *block,
                                            VkDeviceSize offset,
                                            VkDeviceSize size)
{
    ASSERT(!valid());
    ASSERT(block != nullptr);
    ASSERT(offset != VK_WHOLE_SIZE);
    mBufferBlock = block;
    mOffset      = offset;
    mSize        = size;
}
1119 
1120 ANGLE_INLINE void BufferSuballocation::initWithEntireBuffer(
1121     Context *context,
1122     Buffer &buffer,
1123     DeviceMemory &deviceMemory,
1124     VkMemoryPropertyFlags memoryPropertyFlags,
1125     VkDeviceSize size)
1126 {
1127     ASSERT(!valid());
1128 
1129     std::unique_ptr<BufferBlock> block = std::make_unique<BufferBlock>();
1130     block->initWithoutVirtualBlock(context, buffer, deviceMemory, memoryPropertyFlags, size);
1131 
1132     mBufferBlock = block.release();
1133     mOffset      = 0;
1134     mSize        = mBufferBlock->getMemorySize();
1135 }
1136 
// Returns the underlying block's Vulkan buffer.
ANGLE_INLINE const Buffer &BufferSuballocation::getBuffer() const
{
    return mBufferBlock->getBuffer();
}

// Returns the size of this suballocation (not the whole block).
ANGLE_INLINE VkDeviceSize BufferSuballocation::getSize() const
{
    return mSize;
}

// Returns the underlying block's device memory.
ANGLE_INLINE const DeviceMemory &BufferSuballocation::getDeviceMemory() const
{
    return mBufferBlock->getDeviceMemory();
}
1151 
1152 ANGLE_INLINE VkMemoryMapFlags BufferSuballocation::getMemoryPropertyFlags() const
1153 {
1154     return mBufferBlock->getMemoryPropertyFlags();
1155 }
1156 
// True when the underlying block's memory is host-visible.
ANGLE_INLINE bool BufferSuballocation::isHostVisible() const
{
    return mBufferBlock->isHostVisible();
}
// True when the underlying block's memory is host-coherent.
ANGLE_INLINE bool BufferSuballocation::isCoherent() const
{
    return mBufferBlock->isCoherent();
}
// True when the underlying block currently has a mapped CPU pointer.
ANGLE_INLINE bool BufferSuballocation::isMapped() const
{
    return mBufferBlock->isMapped();
}
// Returns the CPU pointer to this suballocation's range: the block's mapped
// pointer advanced by this range's offset.
ANGLE_INLINE uint8_t *BufferSuballocation::getMappedMemory() const
{
    return mBufferBlock->getMappedMemory() + getOffset();
}
1173 
1174 ANGLE_INLINE void BufferSuballocation::flush(const VkDevice &device)
1175 {
1176     if (!isCoherent())
1177     {
1178         VkMappedMemoryRange mappedRange = {};
1179         mappedRange.sType               = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
1180         mappedRange.memory              = mBufferBlock->getDeviceMemory().getHandle();
1181         mappedRange.offset              = getOffset();
1182         mappedRange.size                = mSize;
1183         mBufferBlock->getDeviceMemory().flush(device, mappedRange);
1184     }
1185 }
1186 
1187 ANGLE_INLINE void BufferSuballocation::invalidate(const VkDevice &device)
1188 {
1189     if (!isCoherent())
1190     {
1191         VkMappedMemoryRange mappedRange = {};
1192         mappedRange.sType               = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
1193         mappedRange.memory              = mBufferBlock->getDeviceMemory().getHandle();
1194         mappedRange.offset              = getOffset();
1195         mappedRange.size                = mSize;
1196         mBufferBlock->getDeviceMemory().invalidate(device, mappedRange);
1197     }
1198 }
1199 
// Returns the offset of this range within the underlying block.
ANGLE_INLINE VkDeviceSize BufferSuballocation::getOffset() const
{
    return mOffset;
}

// Private helper for DynamicBuffer (via friend BufferHelper); records an
// externally-managed offset/size.
ANGLE_INLINE void BufferSuballocation::setOffsetAndSize(VkDeviceSize offset, VkDeviceSize size)
{
    mOffset = offset;
    mSize   = size;
}

// Returns the mapped pointer of the whole block (not offset by this range).
ANGLE_INLINE uint8_t *BufferSuballocation::getBlockMemory() const
{
    return mBufferBlock->getMappedMemory();
}
// Returns the size of the whole block (not of this range).
ANGLE_INLINE VkDeviceSize BufferSuballocation::getBlockMemorySize() const
{
    return mBufferBlock->getMemorySize();
}
// Returns the serial of the underlying block; requires a valid suballocation.
ANGLE_INLINE BufferSerial BufferSuballocation::getBlockSerial() const
{
    ASSERT(valid());
    return mBufferBlock->getBufferSerial();
}
// Whether cumulative perf counters are output; off unless the build defines
// ANGLE_ENABLE_PERF_COUNTER_OUTPUT.
#if defined(ANGLE_ENABLE_PERF_COUNTER_OUTPUT)
constexpr bool kOutputCumulativePerfCounters = ANGLE_ENABLE_PERF_COUNTER_OUTPUT;
#else
constexpr bool kOutputCumulativePerfCounters = false;
#endif
1229 
// Performance and resource counters gathered for a single render pass.  Fields
// are uint8_t since each counter's maximum is small (see per-field notes).
struct RenderPassPerfCounters
{
    // load/storeOps. Includes ops for resolve attachment. Maximum value = 2.
    uint8_t colorLoadOpClears;
    uint8_t colorLoadOpLoads;
    uint8_t colorLoadOpNones;
    uint8_t colorStoreOpStores;
    uint8_t colorStoreOpNones;
    uint8_t depthLoadOpClears;
    uint8_t depthLoadOpLoads;
    uint8_t depthLoadOpNones;
    uint8_t depthStoreOpStores;
    uint8_t depthStoreOpNones;
    uint8_t stencilLoadOpClears;
    uint8_t stencilLoadOpLoads;
    uint8_t stencilLoadOpNones;
    uint8_t stencilStoreOpStores;
    uint8_t stencilStoreOpNones;
    // Number of unresolve and resolve operations.  Maximum value for color =
    // gl::IMPLEMENTATION_MAX_DRAW_BUFFERS and for depth/stencil = 1 each.
    uint8_t colorAttachmentUnresolves;
    uint8_t colorAttachmentResolves;
    uint8_t depthAttachmentUnresolves;
    uint8_t depthAttachmentResolves;
    uint8_t stencilAttachmentUnresolves;
    uint8_t stencilAttachmentResolves;
    // Whether the depth/stencil attachment is using a read-only layout.
    uint8_t readOnlyDepthStencil;
};
1260 
1261 // A Vulkan image level index.
1262 using LevelIndex = gl::LevelIndexWrapper<uint32_t>;
1263 
1264 // Ensure viewport is within Vulkan requirements
1265 void ClampViewport(VkViewport *viewport);
1266 
1267 }  // namespace vk
1268 
1269 #if !defined(ANGLE_SHARED_LIBVULKAN)
1270 // Lazily load entry points for each extension as necessary.
1271 void InitDebugUtilsEXTFunctions(VkInstance instance);
1272 void InitDebugReportEXTFunctions(VkInstance instance);
1273 void InitGetPhysicalDeviceProperties2KHRFunctions(VkInstance instance);
1274 void InitTransformFeedbackEXTFunctions(VkDevice device);
1275 void InitSamplerYcbcrKHRFunctions(VkDevice device);
1276 void InitRenderPass2KHRFunctions(VkDevice device);
1277 
1278 #    if defined(ANGLE_PLATFORM_FUCHSIA)
1279 // VK_FUCHSIA_imagepipe_surface
1280 void InitImagePipeSurfaceFUCHSIAFunctions(VkInstance instance);
1281 #    endif
1282 
1283 #    if defined(ANGLE_PLATFORM_ANDROID)
1284 // VK_ANDROID_external_memory_android_hardware_buffer
1285 void InitExternalMemoryHardwareBufferANDROIDFunctions(VkInstance instance);
1286 #    endif
1287 
1288 #    if defined(ANGLE_PLATFORM_GGP)
1289 // VK_GGP_stream_descriptor_surface
1290 void InitGGPStreamDescriptorSurfaceFunctions(VkInstance instance);
1291 #    endif  // defined(ANGLE_PLATFORM_GGP)
1292 
1293 // VK_KHR_external_semaphore_fd
1294 void InitExternalSemaphoreFdFunctions(VkInstance instance);
1295 
1296 // VK_EXT_external_memory_host
1297 void InitExternalMemoryHostFunctions(VkInstance instance);
1298 
// VK_EXT_host_query_reset
1300 void InitHostQueryResetFunctions(VkInstance instance);
1301 
1302 // VK_KHR_external_fence_capabilities
1303 void InitExternalFenceCapabilitiesFunctions(VkInstance instance);
1304 
1305 // VK_KHR_get_memory_requirements2
1306 void InitGetMemoryRequirements2KHRFunctions(VkDevice device);
1307 
1308 // VK_KHR_bind_memory2
1309 void InitBindMemory2KHRFunctions(VkDevice device);
1310 
1311 // VK_KHR_external_fence_fd
1312 void InitExternalFenceFdFunctions(VkInstance instance);
1313 
1314 // VK_KHR_external_semaphore_capabilities
1315 void InitExternalSemaphoreCapabilitiesFunctions(VkInstance instance);
1316 
1317 // VK_KHR_shared_presentable_image
1318 void InitGetSwapchainStatusKHRFunctions(VkDevice device);
1319 
1320 #endif  // !defined(ANGLE_SHARED_LIBVULKAN)
1321 
1322 GLenum CalculateGenerateMipmapFilter(ContextVk *contextVk, angle::FormatID formatID);
1323 size_t PackSampleCount(GLint sampleCount);
1324 
1325 namespace gl_vk
1326 {
1327 VkRect2D GetRect(const gl::Rectangle &source);
1328 VkFilter GetFilter(const GLenum filter);
1329 VkSamplerMipmapMode GetSamplerMipmapMode(const GLenum filter);
1330 VkSamplerAddressMode GetSamplerAddressMode(const GLenum wrap);
1331 VkPrimitiveTopology GetPrimitiveTopology(gl::PrimitiveMode mode);
1332 VkCullModeFlagBits GetCullMode(const gl::RasterizerState &rasterState);
1333 VkFrontFace GetFrontFace(GLenum frontFace, bool invertCullFace);
1334 VkSampleCountFlagBits GetSamples(GLint sampleCount);
1335 VkComponentSwizzle GetSwizzle(const GLenum swizzle);
1336 VkCompareOp GetCompareOp(const GLenum compareFunc);
1337 
1338 constexpr gl::ShaderMap<VkShaderStageFlagBits> kShaderStageMap = {
1339     {gl::ShaderType::Vertex, VK_SHADER_STAGE_VERTEX_BIT},
1340     {gl::ShaderType::TessControl, VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT},
1341     {gl::ShaderType::TessEvaluation, VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT},
1342     {gl::ShaderType::Fragment, VK_SHADER_STAGE_FRAGMENT_BIT},
1343     {gl::ShaderType::Geometry, VK_SHADER_STAGE_GEOMETRY_BIT},
1344     {gl::ShaderType::Compute, VK_SHADER_STAGE_COMPUTE_BIT},
1345 };
1346 
1347 void GetOffset(const gl::Offset &glOffset, VkOffset3D *vkOffset);
1348 void GetExtent(const gl::Extents &glExtent, VkExtent3D *vkExtent);
1349 VkImageType GetImageType(gl::TextureType textureType);
1350 VkImageViewType GetImageViewType(gl::TextureType textureType);
1351 VkColorComponentFlags GetColorComponentFlags(bool red, bool green, bool blue, bool alpha);
1352 VkShaderStageFlags GetShaderStageFlags(gl::ShaderBitSet activeShaders);
1353 
1354 void GetViewport(const gl::Rectangle &viewport,
1355                  float nearPlane,
1356                  float farPlane,
1357                  bool invertViewport,
1358                  bool upperLeftOrigin,
1359                  GLint renderAreaHeight,
1360                  VkViewport *viewportOut);
1361 
1362 void GetExtentsAndLayerCount(gl::TextureType textureType,
1363                              const gl::Extents &extents,
1364                              VkExtent3D *extentsOut,
1365                              uint32_t *layerCountOut);
1366 
1367 vk::LevelIndex GetLevelIndex(gl::LevelIndex levelGL, gl::LevelIndex baseLevel);
1368 
1369 }  // namespace gl_vk
1370 
1371 namespace vk_gl
1372 {
1373 // The Vulkan back-end will not support a sample count of 1, because of a Vulkan specification
1374 // restriction:
1375 //
1376 //   If the image was created with VkImageCreateInfo::samples equal to VK_SAMPLE_COUNT_1_BIT, the
1377 //   instruction must: have MS = 0.
1378 //
1379 // This restriction was tracked in http://anglebug.com/4196 and Khronos-private Vulkan
1380 // specification issue https://gitlab.khronos.org/vulkan/vulkan/issues/1925.
1381 //
1382 // In addition, the Vulkan back-end will not support sample counts of 32 or 64, since there are no
1383 // standard sample locations for those sample counts.
1384 constexpr unsigned int kSupportedSampleCounts = (VK_SAMPLE_COUNT_2_BIT | VK_SAMPLE_COUNT_4_BIT |
1385                                                  VK_SAMPLE_COUNT_8_BIT | VK_SAMPLE_COUNT_16_BIT);
1386 
1387 // Find set bits in sampleCounts and add the corresponding sample count to the set.
1388 void AddSampleCounts(VkSampleCountFlags sampleCounts, gl::SupportedSampleSet *outSet);
1389 // Return the maximum sample count with a bit set in |sampleCounts|.
1390 GLuint GetMaxSampleCount(VkSampleCountFlags sampleCounts);
1391 // Return a supported sample count that's at least as large as the requested one.
1392 GLuint GetSampleCount(VkSampleCountFlags supportedCounts, GLuint requestedCount);
1393 
1394 gl::LevelIndex GetLevelIndex(vk::LevelIndex levelVk, gl::LevelIndex baseLevel);
1395 }  // namespace vk_gl
1396 
// Enumerates why an open render pass was closed, grouped by cause.
enum class RenderPassClosureReason
{
    // Don't specify the reason (it should already be specified elsewhere)
    AlreadySpecifiedElsewhere,

    // Implicit closures due to flush/wait/etc.
    ContextDestruction,
    ContextChange,
    GLFlush,
    GLFinish,
    EGLSwapBuffers,
    EGLWaitClient,

    // Closure due to switching rendering to another framebuffer.
    FramebufferBindingChange,
    FramebufferChange,
    NewRenderPass,

    // Incompatible use of resource in the same render pass
    BufferUseThenXfbWrite,
    XfbWriteThenVertexIndexBuffer,
    XfbWriteThenIndirectDrawBuffer,
    XfbResumeAfterDrawBasedClear,
    DepthStencilUseInFeedbackLoop,
    DepthStencilWriteAfterFeedbackLoop,
    PipelineBindWhileXfbActive,

    // Use of resource after render pass
    BufferWriteThenMap,
    BufferUseThenOutOfRPRead,
    BufferUseThenOutOfRPWrite,
    ImageUseThenOutOfRPRead,
    ImageUseThenOutOfRPWrite,
    XfbWriteThenComputeRead,
    XfbWriteThenIndirectDispatchBuffer,
    ImageAttachmentThenComputeRead,
    GetQueryResult,
    BeginNonRenderPassQuery,
    EndNonRenderPassQuery,
    TimestampQuery,
    GLReadPixels,

    // Synchronization
    BufferUseThenReleaseToExternal,
    ImageUseThenReleaseToExternal,
    BufferInUseWhenSynchronizedMap,
    ImageOrphan,
    GLMemoryBarrierThenStorageResource,
    StorageResourceUseThenGLMemoryBarrier,
    ExternalSemaphoreSignal,
    SyncObjectInit,
    SyncObjectWithFdInit,
    SyncObjectClientWait,
    SyncObjectServerWait,

    // Closures that ANGLE could have avoided, but doesn't for simplicity or optimization of more
    // common cases.
    XfbPause,
    FramebufferFetchEmulation,
    ColorBufferInvalidate,
    GenerateMipmapOnCPU,
    CopyTextureOnCPU,
    TextureReformatToRenderable,
    DeviceLocalBufferMap,

    // UtilsVk
    PrepareForBlit,
    PrepareForImageCopy,
    TemporaryForImageClear,
    TemporaryForImageCopy,
    TemporaryForOverlayDraw,

    InvalidEnum,
    EnumCount = InvalidEnum,
};
1472 
1473 }  // namespace rx
1474 
// Evaluates a VkResult-returning |command|; on failure, reports the error to
// |context| and returns angle::Result::Stop from the enclosing function.
#define ANGLE_VK_TRY(context, command)                                                   \
    do                                                                                   \
    {                                                                                    \
        auto ANGLE_LOCAL_VAR = command;                                                  \
        if (ANGLE_UNLIKELY(ANGLE_LOCAL_VAR != VK_SUCCESS))                               \
        {                                                                                \
            (context)->handleError(ANGLE_LOCAL_VAR, __FILE__, ANGLE_FUNCTION, __LINE__); \
            return angle::Result::Stop;                                                  \
        }                                                                                \
    } while (0)

// Fails with |error| (and returns Stop) when |test| is false.
#define ANGLE_VK_CHECK(context, test, error) ANGLE_VK_TRY(context, test ? VK_SUCCESS : error)

// Fails with VK_ERROR_VALIDATION_FAILED_EXT when an arithmetic check fails.
#define ANGLE_VK_CHECK_MATH(context, result) \
    ANGLE_VK_CHECK(context, result, VK_ERROR_VALIDATION_FAILED_EXT)

// Fails with VK_ERROR_OUT_OF_HOST_MEMORY when an allocation check fails.
#define ANGLE_VK_CHECK_ALLOC(context, result) \
    ANGLE_VK_CHECK(context, result, VK_ERROR_OUT_OF_HOST_MEMORY)

// Asserts unreachable in debug; reports VK_ERROR_FEATURE_NOT_PRESENT in release.
#define ANGLE_VK_UNREACHABLE(context) \
    UNREACHABLE();                    \
    ANGLE_VK_CHECK(context, false, VK_ERROR_FEATURE_NOT_PRESENT)
1497 
1498 // NVIDIA uses special formatting for the driver version:
1499 // Major: 10
1500 // Minor: 8
1501 // Sub-minor: 8
1502 // patch: 6
1503 #define ANGLE_VK_VERSION_MAJOR_NVIDIA(version) (((uint32_t)(version) >> 22) & 0x3ff)
1504 #define ANGLE_VK_VERSION_MINOR_NVIDIA(version) (((uint32_t)(version) >> 14) & 0xff)
1505 #define ANGLE_VK_VERSION_SUB_MINOR_NVIDIA(version) (((uint32_t)(version) >> 6) & 0xff)
1506 #define ANGLE_VK_VERSION_PATCH_NVIDIA(version) ((uint32_t)(version)&0x3f)
1507 
1508 // Similarly for Intel on Windows:
1509 // Major: 18
1510 // Minor: 14
1511 #define ANGLE_VK_VERSION_MAJOR_WIN_INTEL(version) (((uint32_t)(version) >> 14) & 0x3ffff)
1512 #define ANGLE_VK_VERSION_MINOR_WIN_INTEL(version) ((uint32_t)(version)&0x3fff)
1513 
1514 #endif  // LIBANGLE_RENDERER_VULKAN_VK_UTILS_H_
1515