• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 //
2 // Copyright 2016 The ANGLE Project Authors. All rights reserved.
3 // Use of this source code is governed by a BSD-style license that can be
4 // found in the LICENSE file.
5 //
6 // vk_utils:
7 //    Helper functions for the Vulkan Renderer.
8 //
9 
10 #ifndef LIBANGLE_RENDERER_VULKAN_VK_UTILS_H_
11 #define LIBANGLE_RENDERER_VULKAN_VK_UTILS_H_
12 
13 #include <atomic>
14 #include <limits>
15 
16 #include "GLSLANG/ShaderLang.h"
17 #include "common/FixedVector.h"
18 #include "common/Optional.h"
19 #include "common/PackedEnums.h"
20 #include "common/debug.h"
21 #include "libANGLE/Error.h"
22 #include "libANGLE/Observer.h"
23 #include "libANGLE/angletypes.h"
24 #include "libANGLE/renderer/serial_utils.h"
25 #include "libANGLE/renderer/vulkan/SecondaryCommandBuffer.h"
26 #include "libANGLE/renderer/vulkan/VulkanSecondaryCommandBuffer.h"
27 #include "libANGLE/renderer/vulkan/vk_wrapper.h"
28 #include "vulkan/vulkan_fuchsia_ext.h"
29 
30 #define ANGLE_GL_OBJECTS_X(PROC) \
31     PROC(Buffer)                 \
32     PROC(Context)                \
33     PROC(Framebuffer)            \
34     PROC(MemoryObject)           \
35     PROC(Overlay)                \
36     PROC(Program)                \
37     PROC(ProgramPipeline)        \
38     PROC(Query)                  \
39     PROC(Renderbuffer)           \
40     PROC(Sampler)                \
41     PROC(Semaphore)              \
42     PROC(Texture)                \
43     PROC(TransformFeedback)      \
44     PROC(VertexArray)
45 
46 #define ANGLE_PRE_DECLARE_OBJECT(OBJ) class OBJ;
47 
48 namespace egl
49 {
50 class Display;
51 class Image;
52 class ShareGroup;
53 }  // namespace egl
54 
55 namespace gl
56 {
57 class MockOverlay;
58 struct RasterizerState;
59 struct SwizzleState;
60 struct VertexAttribute;
61 class VertexBinding;
62 
63 ANGLE_GL_OBJECTS_X(ANGLE_PRE_DECLARE_OBJECT)
64 }  // namespace gl
65 
66 #define ANGLE_PRE_DECLARE_VK_OBJECT(OBJ) class OBJ##Vk;
67 
68 namespace rx
69 {
70 class DisplayVk;
71 class ImageVk;
72 class ProgramExecutableVk;
73 class RenderbufferVk;
74 class RenderTargetVk;
75 class RendererVk;
76 class RenderPassCache;
77 class ShareGroupVk;
78 }  // namespace rx
79 
80 namespace angle
81 {
82 egl::Error ToEGL(Result result, rx::DisplayVk *displayVk, EGLint errorCode);
83 }  // namespace angle
84 
85 namespace rx
86 {
87 ANGLE_GL_OBJECTS_X(ANGLE_PRE_DECLARE_VK_OBJECT)
88 
89 const char *VulkanResultString(VkResult result);
90 
91 constexpr size_t kMaxVulkanLayers = 20;
92 using VulkanLayerVector           = angle::FixedVector<const char *, kMaxVulkanLayers>;
93 
94 // Verify that validation layers are available.
95 bool GetAvailableValidationLayers(const std::vector<VkLayerProperties> &layerProps,
96                                   bool mustHaveLayers,
97                                   VulkanLayerVector *enabledLayerNames);
98 
// Dimensionality of a texture, used to pick the matching Vulkan image/view type.
enum class TextureDimension
{
    TEX_2D,
    TEX_CUBE,
    TEX_3D,
    TEX_2D_ARRAY,
};
106 
// A maximum offset of 4096 covers almost every Vulkan driver on desktop (80%) and mobile (99%). The
// next highest values to meet native drivers are 16 bits or 32 bits.
// NOTE(review): 15 bits permits offsets up to 32767, not 4096 — confirm the figure above.
constexpr uint32_t kAttributeOffsetMaxBits = 15;
110 
111 namespace vk
112 {
113 // A packed attachment index interface with vulkan API
// Strongly-typed wrapper for a packed render-pass attachment index as consumed
// by the Vulkan API.  Prevents accidental mixing with unrelated uint32_t values.
class PackedAttachmentIndex final
{
  public:
    explicit constexpr PackedAttachmentIndex(uint32_t index) : mAttachmentIndex(index) {}
    constexpr PackedAttachmentIndex(const PackedAttachmentIndex &other) = default;
    constexpr PackedAttachmentIndex &operator=(const PackedAttachmentIndex &other) = default;

    // Raw index for interop with Vulkan structures.
    constexpr uint32_t get() const { return mAttachmentIndex; }

    // Pre-increment: advance to the next attachment slot.
    PackedAttachmentIndex &operator++()
    {
        mAttachmentIndex += 1;
        return *this;
    }

    constexpr bool operator==(const PackedAttachmentIndex &other) const
    {
        return get() == other.get();
    }
    constexpr bool operator!=(const PackedAttachmentIndex &other) const
    {
        return !(*this == other);
    }
    constexpr bool operator<(const PackedAttachmentIndex &other) const
    {
        return get() < other.get();
    }

  private:
    uint32_t mAttachmentIndex;
};
using PackedAttachmentCount                                    = PackedAttachmentIndex;
static constexpr PackedAttachmentIndex kAttachmentIndexInvalid = PackedAttachmentIndex(-1);
static constexpr PackedAttachmentIndex kAttachmentIndexZero    = PackedAttachmentIndex(0);
146 
// Prepend ptr to the pNext chain at chainStart
template <typename VulkanStruct1, typename VulkanStruct2>
void AddToPNextChain(VulkanStruct1 *chainStart, VulkanStruct2 *ptr)
{
    // ptr must not already belong to a chain, or its tail would be dropped.
    ASSERT(ptr->pNext == nullptr);

    // Splice ptr in right after chainStart: ptr inherits chainStart's old
    // successor and chainStart now points at ptr.
    VkBaseOutStructure *localPtr = reinterpret_cast<VkBaseOutStructure *>(chainStart);
    ptr->pNext                   = localPtr->pNext;
    localPtr->pNext              = reinterpret_cast<VkBaseOutStructure *>(ptr);
}
157 
// Append ptr to the end of the chain
template <typename VulkanStruct1, typename VulkanStruct2>
void AppendToPNextChain(VulkanStruct1 *chainStart, VulkanStruct2 *ptr)
{
    // Appending nothing is a no-op so callers can pass an optional struct unconditionally.
    if (!ptr)
    {
        return;
    }

    // Walk to the last element of the chain and hang ptr off its pNext.
    VkBaseOutStructure *endPtr = reinterpret_cast<VkBaseOutStructure *>(chainStart);
    while (endPtr->pNext)
    {
        endPtr = endPtr->pNext;
    }
    endPtr->pNext = reinterpret_cast<VkBaseOutStructure *>(ptr);
}
174 
// Records a Vulkan failure: the failing VkResult plus the file/function/line
// of the call site that reported it.
struct Error
{
    VkResult errorCode;
    const char *file;
    const char *function;
    uint32_t line;
};
182 
// Abstracts error handling. Implemented by both ContextVk for GL and DisplayVk for EGL errors.
class Context : angle::NonCopyable
{
  public:
    Context(RendererVk *renderer);
    virtual ~Context();

    // Invoked when a Vulkan call fails; implementations translate the VkResult
    // into the appropriate GL/EGL error.  file/function/line identify the call site.
    virtual void handleError(VkResult result,
                             const char *file,
                             const char *function,
                             unsigned int line) = 0;
    VkDevice getDevice() const;
    RendererVk *getRenderer() const { return mRenderer; }

  protected:
    // The renderer this context operates on; fixed for the context's lifetime.
    RendererVk *const mRenderer;
};
200 
201 class RenderPassDesc;
202 
203 #if ANGLE_USE_CUSTOM_VULKAN_CMD_BUFFERS
204 using CommandBuffer = priv::SecondaryCommandBuffer;
205 #else
206 using CommandBuffer                          = VulkanSecondaryCommandBuffer;
207 #endif
208 
209 using SecondaryCommandBufferList = std::vector<CommandBuffer>;
210 
211 VkImageAspectFlags GetDepthStencilAspectFlags(const angle::Format &format);
212 VkImageAspectFlags GetFormatAspectFlags(const angle::Format &format);
213 
// Maps a front-end (gl:: or egl::) object type to its Vulkan back-end
// implementation type, e.g. gl::Buffer -> BufferVk.  Specialized below.
template <typename T>
struct ImplTypeHelper;

// clang-format off
#define ANGLE_IMPL_TYPE_HELPER_GL(OBJ) \
template<>                             \
struct ImplTypeHelper<gl::OBJ>         \
{                                      \
    using ImplType = OBJ##Vk;          \
};
// clang-format on

// Generate the gl:: specializations for every object in ANGLE_GL_OBJECTS_X.
ANGLE_GL_OBJECTS_X(ANGLE_IMPL_TYPE_HELPER_GL)

template <>
struct ImplTypeHelper<gl::MockOverlay>
{
    using ImplType = OverlayVk;
};

template <>
struct ImplTypeHelper<egl::Display>
{
    using ImplType = DisplayVk;
};

template <>
struct ImplTypeHelper<egl::Image>
{
    using ImplType = ImageVk;
};

template <>
struct ImplTypeHelper<egl::ShareGroup>
{
    using ImplType = ShareGroupVk;
};

// Convenience alias: GetImplType<gl::Buffer> is BufferVk, etc.
template <typename T>
using GetImplType = typename ImplTypeHelper<T>::ImplType;
254 
// Retrieves the back-end implementation object for a front-end object.
template <typename T>
GetImplType<T> *GetImpl(const T *glObject)
{
    return GetImplAs<GetImplType<T>>(glObject);
}

// The mock overlay has no back-end implementation object.
template <>
inline OverlayVk *GetImpl(const gl::MockOverlay *glObject)
{
    return nullptr;
}
266 
// Pairs an object with the Serial of its most recent use, so destruction can be
// deferred until that serial is known to be finished.
template <typename ObjT>
class ObjectAndSerial final : angle::NonCopyable
{
  public:
    ObjectAndSerial() {}

    ObjectAndSerial(ObjT &&object, Serial serial) : mObject(std::move(object)), mSerial(serial) {}

    // Move-only, like the wrapped Vulkan objects.
    ObjectAndSerial(ObjectAndSerial &&other)
        : mObject(std::move(other.mObject)), mSerial(std::move(other.mSerial))
    {}
    ObjectAndSerial &operator=(ObjectAndSerial &&other)
    {
        mObject = std::move(other.mObject);
        mSerial = std::move(other.mSerial);
        return *this;
    }

    Serial getSerial() const { return mSerial; }
    // Record a newer use of the object.
    void updateSerial(Serial newSerial) { mSerial = newSerial; }

    const ObjT &get() const { return mObject; }
    ObjT &get() { return mObject; }

    bool valid() const { return mObject.valid(); }

    // Destroys the wrapped object and resets the serial to the default.
    void destroy(VkDevice device)
    {
        mObject.destroy(device);
        mSerial = Serial();
    }

  private:
    ObjT mObject;
    Serial mSerial;
};
303 
// Reference to a deleted object. The object is due to be destroyed at some point in the future.
// |mHandleType| determines the type of the object and which destroy function should be called.
class GarbageObject
{
  public:
    GarbageObject();
    GarbageObject(GarbageObject &&other);
    GarbageObject &operator=(GarbageObject &&rhs);

    bool valid() const { return mHandle != VK_NULL_HANDLE; }
    // Destroys the stored handle using the destroy function selected by mHandleType.
    void destroy(RendererVk *renderer);

    // Takes ownership of |object|'s handle; |object| is left released (empty).
    template <typename DerivedT, typename HandleT>
    static GarbageObject Get(WrappedObject<DerivedT, HandleT> *object)
    {
        // Using c-style cast here to avoid conditional compile for MSVC 32-bit
        //  which fails to compile with reinterpret_cast, requiring static_cast.
        return GarbageObject(HandleTypeHelper<DerivedT>::kHandleType,
                             (GarbageHandle)(object->release()));
    }

  private:
    // Type-erased non-dispatchable handle storage.
    VK_DEFINE_NON_DISPATCHABLE_HANDLE(GarbageHandle)
    GarbageObject(HandleType handleType, GarbageHandle handle);

    HandleType mHandleType;
    GarbageHandle mHandle;
};
332 
// Convenience wrapper around GarbageObject::Get for any wrapped object type.
template <typename T>
GarbageObject GetGarbage(T *obj)
{
    return GarbageObject::Get(obj);
}
338 
339 // A list of garbage objects. Has no object lifetime information.
340 using GarbageList = std::vector<GarbageObject>;
341 
342 // A list of garbage objects and the associated serial after which the objects can be destroyed.
343 using GarbageAndSerial = ObjectAndSerial<GarbageList>;
344 
345 // Houses multiple lists of garbage objects. Each sub-list has a different lifetime. They should be
346 // sorted such that later-living garbage is ordered later in the list.
347 using GarbageQueue = std::vector<GarbageAndSerial>;
348 
// Caches the physical device's memory properties and answers memory-type queries.
class MemoryProperties final : angle::NonCopyable
{
  public:
    MemoryProperties();

    void init(VkPhysicalDevice physicalDevice);
    bool hasLazilyAllocatedMemory() const;
    // Finds a memory type index compatible with |memoryRequirements| and the
    // requested property flags; the chosen type's flags and index are returned
    // through the out parameters.
    angle::Result findCompatibleMemoryIndex(Context *context,
                                            const VkMemoryRequirements &memoryRequirements,
                                            VkMemoryPropertyFlags requestedMemoryPropertyFlags,
                                            bool isExternalMemory,
                                            VkMemoryPropertyFlags *memoryPropertyFlagsOut,
                                            uint32_t *indexOut) const;
    void destroy();

    // Size of the heap backing the given memory type.  |memoryType| must be a
    // valid index (< getMemoryTypeCount()); no bounds check is performed.
    VkDeviceSize getHeapSizeForMemoryType(uint32_t memoryType) const
    {
        uint32_t heapIndex = mMemoryProperties.memoryTypes[memoryType].heapIndex;
        return mMemoryProperties.memoryHeaps[heapIndex].size;
    }

    uint32_t getMemoryTypeCount() const { return mMemoryProperties.memoryTypeCount; }

  private:
    VkPhysicalDeviceMemoryProperties mMemoryProperties;
};
375 
// Memory backing a buffer: an Allocation when allocated internally, or a
// dedicated DeviceMemory when wrapping an external client buffer.
class BufferMemory : angle::NonCopyable
{
  public:
    BufferMemory();
    ~BufferMemory();
    // Initialize for a client-provided (external) buffer.
    angle::Result initExternal(void *clientBuffer);
    // Initialize for an internal allocation.
    angle::Result init();

    void destroy(RendererVk *renderer);

    // Maps the memory on first use and returns the cached mapped pointer.
    angle::Result map(ContextVk *contextVk, VkDeviceSize size, uint8_t **ptrOut)
    {
        if (mMappedMemory == nullptr)
        {
            ANGLE_TRY(mapImpl(contextVk, size));
        }
        *ptrOut = mMappedMemory;
        return angle::Result::Continue;
    }
    void unmap(RendererVk *renderer);
    void flush(RendererVk *renderer,
               VkMemoryMapFlags memoryPropertyFlags,
               VkDeviceSize offset,
               VkDeviceSize size);
    void invalidate(RendererVk *renderer,
                    VkMemoryMapFlags memoryPropertyFlags,
                    VkDeviceSize offset,
                    VkDeviceSize size);

    bool isExternalBuffer() const { return mClientBuffer != nullptr; }

    // nullptr until map() has been called.
    uint8_t *getMappedMemory() const { return mMappedMemory; }
    DeviceMemory *getExternalMemoryObject() { return &mExternalMemory; }
    Allocation *getMemoryObject() { return &mAllocation; }

  private:
    angle::Result mapImpl(ContextVk *contextVk, VkDeviceSize size);

    Allocation mAllocation;        // use mAllocation if isExternalBuffer() is false
    DeviceMemory mExternalMemory;  // use mExternalMemory if isExternalBuffer() is true

    void *mClientBuffer;
    uint8_t *mMappedMemory;
};
420 
// Similar to StagingImage, for Buffers.
class StagingBuffer final : angle::NonCopyable
{
  public:
    StagingBuffer();
    void release(ContextVk *contextVk);
    // Queues the buffer/allocation as garbage associated with |serial|.
    void collectGarbage(RendererVk *renderer, Serial serial);
    void destroy(RendererVk *renderer);

    angle::Result init(Context *context, VkDeviceSize size, StagingUsage usage);

    Buffer &getBuffer() { return mBuffer; }
    const Buffer &getBuffer() const { return mBuffer; }
    // Size requested at init time.
    size_t getSize() const { return mSize; }

  private:
    Buffer mBuffer;
    Allocation mAllocation;
    size_t mSize;
};
441 
// Allocates mappable memory and initializes its contents with |value|.
angle::Result InitMappableAllocation(Context *context,
                                     const Allocator &allocator,
                                     Allocation *allocation,
                                     VkDeviceSize size,
                                     int value,
                                     VkMemoryPropertyFlags memoryPropertyFlags);

angle::Result InitMappableDeviceMemory(Context *context,
                                       DeviceMemory *deviceMemory,
                                       VkDeviceSize size,
                                       int value,
                                       VkMemoryPropertyFlags memoryPropertyFlags);

// Allocates and binds memory for the given buffer/image.  The granted property
// flags and allocation size come back through the *Out parameters.
angle::Result AllocateBufferMemory(Context *context,
                                   VkMemoryPropertyFlags requestedMemoryPropertyFlags,
                                   VkMemoryPropertyFlags *memoryPropertyFlagsOut,
                                   const void *extraAllocationInfo,
                                   Buffer *buffer,
                                   DeviceMemory *deviceMemoryOut,
                                   VkDeviceSize *sizeOut);

angle::Result AllocateImageMemory(Context *context,
                                  VkMemoryPropertyFlags memoryPropertyFlags,
                                  VkMemoryPropertyFlags *memoryPropertyFlagsOut,
                                  const void *extraAllocationInfo,
                                  Image *image,
                                  DeviceMemory *deviceMemoryOut,
                                  VkDeviceSize *sizeOut);

// Variants taking pre-queried VkMemoryRequirements instead of querying them.
angle::Result AllocateImageMemoryWithRequirements(
    Context *context,
    VkMemoryPropertyFlags memoryPropertyFlags,
    const VkMemoryRequirements &memoryRequirements,
    const void *extraAllocationInfo,
    const VkBindImagePlaneMemoryInfoKHR *extraBindInfo,
    Image *image,
    DeviceMemory *deviceMemoryOut);

angle::Result AllocateBufferMemoryWithRequirements(Context *context,
                                                   VkMemoryPropertyFlags memoryPropertyFlags,
                                                   const VkMemoryRequirements &memoryRequirements,
                                                   const void *extraAllocationInfo,
                                                   Buffer *buffer,
                                                   VkMemoryPropertyFlags *memoryPropertyFlagsOut,
                                                   DeviceMemory *deviceMemoryOut);

// A shader module paired with the serial of its last use.
using ShaderAndSerial = ObjectAndSerial<ShaderModule>;

// Builds the shader module from the given code words.
angle::Result InitShaderAndSerial(Context *context,
                                  ShaderAndSerial *shaderAndSerial,
                                  const uint32_t *shaderCode,
                                  size_t shaderCodeSize);

// Texture type corresponding to a 2D texture with the given layer/sample counts.
gl::TextureType Get2DTextureType(uint32_t layerCount, GLint samples);

// Whether recording starts fresh or appends to previously recorded commands.
enum class RecordingMode
{
    Start,
    Append,
};
502 
503 // Helper class to handle RAII patterns for initialization. Requires that T have a destroy method
504 // that takes a VkDevice and returns void.
505 template <typename T>
506 class DeviceScoped final : angle::NonCopyable
507 {
508   public:
509     DeviceScoped(VkDevice device) : mDevice(device) {}
510     ~DeviceScoped() { mVar.destroy(mDevice); }
511 
512     const T &get() const { return mVar; }
513     T &get() { return mVar; }
514 
515     T &&release() { return std::move(mVar); }
516 
517   private:
518     VkDevice mDevice;
519     T mVar;
520 };
521 
522 // Similar to DeviceScoped, but releases objects instead of destroying them. Requires that T have a
523 // release method that takes a ContextVk * and returns void.
524 template <typename T>
525 class ContextScoped final : angle::NonCopyable
526 {
527   public:
528     ContextScoped(ContextVk *contextVk) : mContextVk(contextVk) {}
529     ~ContextScoped() { mVar.release(mContextVk); }
530 
531     const T &get() const { return mVar; }
532     T &get() { return mVar; }
533 
534     T &&release() { return std::move(mVar); }
535 
536   private:
537     ContextVk *mContextVk;
538     T mVar;
539 };
540 
541 template <typename T>
542 class RendererScoped final : angle::NonCopyable
543 {
544   public:
545     RendererScoped(RendererVk *renderer) : mRenderer(renderer) {}
546     ~RendererScoped() { mVar.release(mRenderer); }
547 
548     const T &get() const { return mVar; }
549     T &get() { return mVar; }
550 
551     T &&release() { return std::move(mVar); }
552 
553   private:
554     RendererVk *mRenderer;
555     T mVar;
556 };
557 
// This is a very simple RefCount class that has no autoreleasing. Used in the descriptor set and
// pipeline layout caches.
template <typename T>
class RefCounted : angle::NonCopyable
{
  public:
    RefCounted() : mRefCount(0) {}
    explicit RefCounted(T &&newObject) : mRefCount(0), mObject(std::move(newObject)) {}
    // All references must be dropped and the object destroyed/released before
    // this wrapper is destroyed.
    ~RefCounted() { ASSERT(mRefCount == 0 && !mObject.valid()); }

    // Movable: the count moves with the object; the moved-from count becomes 0.
    RefCounted(RefCounted &&copy) : mRefCount(copy.mRefCount), mObject(std::move(copy.mObject))
    {
        ASSERT(this != &copy);
        copy.mRefCount = 0;
    }

    RefCounted &operator=(RefCounted &&rhs)
    {
        std::swap(mRefCount, rhs.mRefCount);
        mObject = std::move(rhs.mObject);
        return *this;
    }

    void addRef()
    {
        ASSERT(mRefCount != std::numeric_limits<uint32_t>::max());
        mRefCount++;
    }

    // Does NOT destroy the object when the count reaches zero (no autorelease);
    // that is the caller's responsibility.
    void releaseRef()
    {
        ASSERT(isReferenced());
        mRefCount--;
    }

    bool isReferenced() const { return mRefCount != 0; }

    T &get() { return mObject; }
    const T &get() const { return mObject; }

    // A debug function to validate that the reference count is as expected used for assertions.
    bool isRefCountAsExpected(uint32_t expectedRefCount) { return mRefCount == expectedRefCount; }

  private:
    uint32_t mRefCount;
    T mObject;
};
605 
606 template <typename T>
607 class BindingPointer final : angle::NonCopyable
608 {
609   public:
610     BindingPointer() = default;
611     ~BindingPointer() { reset(); }
612 
613     BindingPointer(BindingPointer &&other)
614     {
615         set(other.mRefCounted);
616         other.reset();
617     }
618 
619     void set(RefCounted<T> *refCounted)
620     {
621         if (mRefCounted)
622         {
623             mRefCounted->releaseRef();
624         }
625 
626         mRefCounted = refCounted;
627 
628         if (mRefCounted)
629         {
630             mRefCounted->addRef();
631         }
632     }
633 
634     void reset() { set(nullptr); }
635 
636     T &get() { return mRefCounted->get(); }
637     const T &get() const { return mRefCounted->get(); }
638 
639     bool valid() const { return mRefCounted != nullptr; }
640 
641   private:
642     RefCounted<T> *mRefCounted = nullptr;
643 };
644 
// Helper class to share ref-counted Vulkan objects.  Requires that T have a destroy method
// that takes a VkDevice and returns void.
template <typename T>
class Shared final : angle::NonCopyable
{
  public:
    Shared() : mRefCounted(nullptr) {}
    // The reference must have been dropped (reset/resetAndRecycle/...) first.
    ~Shared() { ASSERT(mRefCounted == nullptr); }

    Shared(Shared &&other) { *this = std::move(other); }
    Shared &operator=(Shared &&other)
    {
        ASSERT(this != &other);
        mRefCounted       = other.mRefCounted;
        other.mRefCounted = nullptr;
        return *this;
    }

    // Rebinds to |refCounted| (may be nullptr).  If this held the last reference
    // to the previous object, it is destroyed and its wrapper deleted.
    void set(VkDevice device, RefCounted<T> *refCounted)
    {
        if (mRefCounted)
        {
            mRefCounted->releaseRef();
            if (!mRefCounted->isReferenced())
            {
                mRefCounted->get().destroy(device);
                SafeDelete(mRefCounted);
            }
        }

        mRefCounted = refCounted;

        if (mRefCounted)
        {
            mRefCounted->addRef();
        }
    }

    // Like set(), but only valid while currently unbound, so nothing can be
    // destroyed and no device is needed.
    void setUnreferenced(RefCounted<T> *refCounted)
    {
        ASSERT(!mRefCounted);
        ASSERT(refCounted);

        mRefCounted = refCounted;
        mRefCounted->addRef();
    }

    // Takes initial ownership of |newObject| in a freshly allocated wrapper.
    void assign(VkDevice device, T &&newObject)
    {
        set(device, new RefCounted<T>(std::move(newObject)));
    }

    void copy(VkDevice device, const Shared<T> &other) { set(device, other.mRefCounted); }

    void copyUnreferenced(const Shared<T> &other) { setUnreferenced(other.mRefCounted); }

    void reset(VkDevice device) { set(device, nullptr); }

    // Drops the reference; if it was the last one, hands the object to
    // |recycler| instead of destroying it.
    template <typename RecyclerT>
    void resetAndRecycle(RecyclerT *recycler)
    {
        if (mRefCounted)
        {
            mRefCounted->releaseRef();
            if (!mRefCounted->isReferenced())
            {
                ASSERT(mRefCounted->get().valid());
                recycler->recycle(std::move(mRefCounted->get()));
                SafeDelete(mRefCounted);
            }

            mRefCounted = nullptr;
        }
    }

    // Drops the reference; if it was the last one, passes the object to the
    // |onRelease| callable instead of destroying it.
    template <typename OnRelease>
    void resetAndRelease(OnRelease *onRelease)
    {
        if (mRefCounted)
        {
            mRefCounted->releaseRef();
            if (!mRefCounted->isReferenced())
            {
                ASSERT(mRefCounted->get().valid());
                (*onRelease)(std::move(mRefCounted->get()));
                SafeDelete(mRefCounted);
            }

            mRefCounted = nullptr;
        }
    }

    bool isReferenced() const
    {
        // If reference is zero, the object should have been deleted.  I.e. if the object is not
        // nullptr, it should have a reference.
        ASSERT(!mRefCounted || mRefCounted->isReferenced());
        return mRefCounted != nullptr;
    }

    T &get()
    {
        ASSERT(mRefCounted && mRefCounted->isReferenced());
        return mRefCounted->get();
    }
    const T &get() const
    {
        ASSERT(mRefCounted && mRefCounted->isReferenced());
        return mRefCounted->get();
    }

  private:
    RefCounted<T> *mRefCounted;
};
759 
760 template <typename T>
761 class Recycler final : angle::NonCopyable
762 {
763   public:
764     Recycler() = default;
765 
766     void recycle(T &&garbageObject) { mObjectFreeList.emplace_back(std::move(garbageObject)); }
767 
768     void fetch(T *outObject)
769     {
770         ASSERT(!empty());
771         *outObject = std::move(mObjectFreeList.back());
772         mObjectFreeList.pop_back();
773     }
774 
775     void destroy(VkDevice device)
776     {
777         for (T &object : mObjectFreeList)
778         {
779             object.destroy(device);
780         }
781     }
782 
783     bool empty() const { return mObjectFreeList.empty(); }
784 
785   private:
786     std::vector<T> mObjectFreeList;
787 };
788 
ANGLE_ENABLE_STRUCT_PADDING_WARNINGS
// Values supplied to shaders as Vulkan specialization constants.  Wrapped in
// the struct-padding warning macros to keep the layout tightly packed.
struct SpecializationConstants final
{
    VkBool32 lineRasterEmulation;
    uint32_t surfaceRotation;
    float drawableWidth;
    float drawableHeight;
};
ANGLE_DISABLE_STRUCT_PADDING_WARNINGS
798 
799 template <typename T>
800 using SpecializationConstantMap = angle::PackedEnumMap<sh::vk::SpecializationConstantId, T>;
801 
802 using ShaderAndSerialPointer = BindingPointer<ShaderAndSerial>;
803 using ShaderAndSerialMap     = gl::ShaderMap<ShaderAndSerialPointer>;
804 
805 void MakeDebugUtilsLabel(GLenum source, const char *marker, VkDebugUtilsLabelEXT *label);
806 
// Indices of the depth and stencil clear values, stored after the color values.
constexpr size_t kUnpackedDepthIndex   = gl::IMPLEMENTATION_MAX_DRAW_BUFFERS;
constexpr size_t kUnpackedStencilIndex = gl::IMPLEMENTATION_MAX_DRAW_BUFFERS + 1;

// Clear values for the color attachments plus the unpacked depth/stencil slots,
// with a mask tracking which entries are enabled.
class ClearValuesArray final
{
  public:
    ClearValuesArray();
    ~ClearValuesArray();

    ClearValuesArray(const ClearValuesArray &other);
    ClearValuesArray &operator=(const ClearValuesArray &rhs);

    void store(uint32_t index, VkImageAspectFlags aspectFlags, const VkClearValue &clearValue);
    void storeNoDepthStencil(uint32_t index, const VkClearValue &clearValue);

    // Zeroes the value and disables the entry at |index|.
    void reset(size_t index)
    {
        mValues[index] = {};
        mEnabled.reset(index);
    }

    bool test(size_t index) const { return mEnabled.test(index); }
    bool testDepth() const { return mEnabled.test(kUnpackedDepthIndex); }
    bool testStencil() const { return mEnabled.test(kUnpackedStencilIndex); }
    gl::DrawBufferMask getColorMask() const;

    const VkClearValue &operator[](size_t index) const { return mValues[index]; }

    float getDepthValue() const { return mValues[kUnpackedDepthIndex].depthStencil.depth; }
    uint32_t getStencilValue() const { return mValues[kUnpackedStencilIndex].depthStencil.stencil; }

    const VkClearValue *data() const { return mValues.data(); }
    bool empty() const { return mEnabled.none(); }
    bool any() const { return mEnabled.any(); }

  private:
    gl::AttachmentArray<VkClearValue> mValues;
    gl::AttachmentsMask mEnabled;
};
846 
// Defines Serials for Vulkan objects.
#define ANGLE_VK_SERIAL_OP(X) \
    X(Buffer)                 \
    X(Image)                  \
    X(ImageOrBufferView)      \
    X(Sampler)

// Generates a strongly-typed serial class per object type.  The value 0 is
// reserved as "invalid"; equality comparisons assert both sides are valid.
// (Comments cannot go inside the macro body: they would break the line
// continuations.)
#define ANGLE_DEFINE_VK_SERIAL_TYPE(Type)                                     \
    class Type##Serial                                                        \
    {                                                                         \
      public:                                                                 \
        constexpr Type##Serial() : mSerial(kInvalid) {}                       \
        constexpr explicit Type##Serial(uint32_t serial) : mSerial(serial) {} \
                                                                              \
        constexpr bool operator==(const Type##Serial &other) const            \
        {                                                                     \
            ASSERT(mSerial != kInvalid);                                      \
            ASSERT(other.mSerial != kInvalid);                                \
            return mSerial == other.mSerial;                                  \
        }                                                                     \
        constexpr bool operator!=(const Type##Serial &other) const            \
        {                                                                     \
            ASSERT(mSerial != kInvalid);                                      \
            ASSERT(other.mSerial != kInvalid);                                \
            return mSerial != other.mSerial;                                  \
        }                                                                     \
        constexpr uint32_t getValue() const { return mSerial; }               \
        constexpr bool valid() const { return mSerial != kInvalid; }          \
                                                                              \
      private:                                                                \
        uint32_t mSerial;                                                     \
        static constexpr uint32_t kInvalid = 0;                               \
    };                                                                        \
    static constexpr Type##Serial kInvalid##Type##Serial = Type##Serial();

// Instantiate BufferSerial, ImageSerial, ImageOrBufferViewSerial, SamplerSerial.
ANGLE_VK_SERIAL_OP(ANGLE_DEFINE_VK_SERIAL_TYPE)
883 
// Declares e.g. "BufferSerial generateBufferSerial();" for each serial type.
#define ANGLE_DECLARE_GEN_VK_SERIAL(Type) Type##Serial generate##Type##Serial();

// Hands out unique serial values for Vulkan resources of all serial types.
class ResourceSerialFactory final : angle::NonCopyable
{
  public:
    ResourceSerialFactory();
    ~ResourceSerialFactory();

    ANGLE_VK_SERIAL_OP(ANGLE_DECLARE_GEN_VK_SERIAL)

  private:
    // Single source of unique values shared by all generate*Serial() methods.
    uint32_t issueSerial();

    // Kept atomic so it can be accessed from multiple Context threads at once.
    std::atomic<uint32_t> mCurrentUniqueSerial;
};
900 
// Compile-time switch: when ANGLE_ENABLE_PERF_COUNTER_OUTPUT is defined, its
// value controls whether cumulative perf counters are reported.
#if defined(ANGLE_ENABLE_PERF_COUNTER_OUTPUT)
constexpr bool kOutputCumulativePerfCounters = ANGLE_ENABLE_PERF_COUNTER_OUTPUT;
#else
constexpr bool kOutputCumulativePerfCounters = false;
#endif
906 
// Performance and resource counters.
// Counters gathered for a single render pass; uint8_t is sufficient given the
// documented maxima below.
struct RenderPassPerfCounters
{
    // load/storeOps. Includes ops for resolve attachment. Maximum value = 2.
    uint8_t depthClears;
    uint8_t depthLoads;
    uint8_t depthStores;
    uint8_t stencilClears;
    uint8_t stencilLoads;
    uint8_t stencilStores;
    // Number of unresolve and resolve operations.  Maximum value for color =
    // gl::IMPLEMENTATION_MAX_DRAW_BUFFERS and for depth/stencil = 1 each.
    uint8_t colorAttachmentUnresolves;
    uint8_t colorAttachmentResolves;
    uint8_t depthAttachmentUnresolves;
    uint8_t depthAttachmentResolves;
    uint8_t stencilAttachmentUnresolves;
    uint8_t stencilAttachmentResolves;
    // Whether the depth/stencil attachment is using a read-only layout.
    uint8_t readOnlyDepthStencil;
};
928 
// Aggregated performance and resource counters for the renderer.
struct PerfCounters
{
    // Command buffer / descriptor set bookkeeping.
    uint32_t primaryBuffers;
    uint32_t renderPasses;
    uint32_t writeDescriptorSets;
    uint32_t flushedOutsideRenderPassCommandBuffers;
    uint32_t resolveImageCommands;
    // Attachment op totals; cf. the per-pass fields in RenderPassPerfCounters.
    uint32_t depthClears;
    uint32_t depthLoads;
    uint32_t depthStores;
    uint32_t stencilClears;
    uint32_t stencilLoads;
    uint32_t stencilStores;
    uint32_t colorAttachmentUnresolves;
    uint32_t depthAttachmentUnresolves;
    uint32_t stencilAttachmentUnresolves;
    uint32_t colorAttachmentResolves;
    uint32_t depthAttachmentResolves;
    uint32_t stencilAttachmentResolves;
    uint32_t readOnlyDepthStencilRenderPasses;
    // Descriptor set allocation/caching statistics.
    uint32_t descriptorSetAllocations;
    uint32_t shaderBuffersDescriptorSetCacheHits;
    uint32_t shaderBuffersDescriptorSetCacheMisses;
    uint32_t buffersGhosted;
    uint32_t vertexArraySyncStateCalls;
};
955 
// A Vulkan image level index.  A distinct wrapper type so Vulkan mip levels
// (offset by the texture's base level; see gl_vk::GetLevelIndex) cannot be
// silently mixed with gl::LevelIndex values.
using LevelIndex = gl::LevelIndexWrapper<uint32_t>;

// Ensure viewport is within Vulkan requirements
void ClampViewport(VkViewport *viewport);
961 
962 }  // namespace vk
963 
#if !defined(ANGLE_SHARED_LIBVULKAN)
// Lazily load entry points for each extension as necessary.
// VK_EXT_debug_utils / VK_EXT_debug_report / VK_KHR_get_physical_device_properties2
void InitDebugUtilsEXTFunctions(VkInstance instance);
void InitDebugReportEXTFunctions(VkInstance instance);
void InitGetPhysicalDeviceProperties2KHRFunctions(VkInstance instance);
// VK_EXT_transform_feedback / VK_KHR_sampler_ycbcr_conversion / VK_KHR_create_renderpass2
void InitTransformFeedbackEXTFunctions(VkDevice device);
void InitSamplerYcbcrKHRFunctions(VkDevice device);
void InitRenderPass2KHRFunctions(VkDevice device);

#    if defined(ANGLE_PLATFORM_FUCHSIA)
// VK_FUCHSIA_imagepipe_surface
void InitImagePipeSurfaceFUCHSIAFunctions(VkInstance instance);
#    endif

#    if defined(ANGLE_PLATFORM_ANDROID)
// VK_ANDROID_external_memory_android_hardware_buffer
void InitExternalMemoryHardwareBufferANDROIDFunctions(VkInstance instance);
#    endif

#    if defined(ANGLE_PLATFORM_GGP)
// VK_GGP_stream_descriptor_surface
void InitGGPStreamDescriptorSurfaceFunctions(VkInstance instance);
#    endif  // defined(ANGLE_PLATFORM_GGP)

// VK_KHR_external_semaphore_fd
void InitExternalSemaphoreFdFunctions(VkInstance instance);

// VK_EXT_external_memory_host
void InitExternalMemoryHostFunctions(VkInstance instance);

// VK_EXT_host_query_reset
void InitHostQueryResetFunctions(VkInstance instance);

// VK_KHR_external_fence_capabilities
void InitExternalFenceCapabilitiesFunctions(VkInstance instance);

// VK_KHR_get_memory_requirements2
void InitGetMemoryRequirements2KHRFunctions(VkDevice device);

// VK_KHR_bind_memory2
void InitBindMemory2KHRFunctions(VkDevice device);

// VK_KHR_external_fence_fd
void InitExternalFenceFdFunctions(VkInstance instance);

// VK_KHR_external_semaphore_capabilities
void InitExternalSemaphoreCapabilitiesFunctions(VkInstance instance);

// VK_KHR_shared_presentable_image
void InitGetSwapchainStatusKHRFunctions(VkDevice device);

#endif  // !defined(ANGLE_SHARED_LIBVULKAN)
1016 
// Returns the filter to use when generating mipmaps for |formatID|; depends on
// the context/format (semantics live in the definition, not this header).
GLenum CalculateGenerateMipmapFilter(ContextVk *contextVk, angle::FormatID formatID);
// Packs a GL sample count — NOTE(review): exact packing is defined out-of-line;
// confirm against the definition before relying on a specific encoding.
size_t PackSampleCount(GLint sampleCount);
1019 
// gl_vk: conversions from GL/ANGLE types and enums to their Vulkan equivalents.
namespace gl_vk
{
VkRect2D GetRect(const gl::Rectangle &source);
VkFilter GetFilter(const GLenum filter);
VkSamplerMipmapMode GetSamplerMipmapMode(const GLenum filter);
VkSamplerAddressMode GetSamplerAddressMode(const GLenum wrap);
VkPrimitiveTopology GetPrimitiveTopology(gl::PrimitiveMode mode);
VkCullModeFlagBits GetCullMode(const gl::RasterizerState &rasterState);
VkFrontFace GetFrontFace(GLenum frontFace, bool invertCullFace);
VkSampleCountFlagBits GetSamples(GLint sampleCount);
VkComponentSwizzle GetSwizzle(const GLenum swizzle);
VkCompareOp GetCompareOp(const GLenum compareFunc);

// Maps each gl::ShaderType to the corresponding Vulkan shader stage bit.
constexpr gl::ShaderMap<VkShaderStageFlagBits> kShaderStageMap = {
    {gl::ShaderType::Vertex, VK_SHADER_STAGE_VERTEX_BIT},
    {gl::ShaderType::TessControl, VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT},
    {gl::ShaderType::TessEvaluation, VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT},
    {gl::ShaderType::Fragment, VK_SHADER_STAGE_FRAGMENT_BIT},
    {gl::ShaderType::Geometry, VK_SHADER_STAGE_GEOMETRY_BIT},
    {gl::ShaderType::Compute, VK_SHADER_STAGE_COMPUTE_BIT},
};

void GetOffset(const gl::Offset &glOffset, VkOffset3D *vkOffset);
void GetExtent(const gl::Extents &glExtent, VkExtent3D *vkExtent);
VkImageType GetImageType(gl::TextureType textureType);
VkImageViewType GetImageViewType(gl::TextureType textureType);
VkColorComponentFlags GetColorComponentFlags(bool red, bool green, bool blue, bool alpha);
VkShaderStageFlags GetShaderStageFlags(gl::ShaderBitSet activeShaders);

// Builds a VkViewport from a GL viewport rectangle, flipping as needed for the
// GL (bottom-left) vs Vulkan (top-left) origin difference.
void GetViewport(const gl::Rectangle &viewport,
                 float nearPlane,
                 float farPlane,
                 bool invertViewport,
                 bool upperLeftOrigin,
                 GLint renderAreaHeight,
                 VkViewport *viewportOut);

void GetExtentsAndLayerCount(gl::TextureType textureType,
                             const gl::Extents &extents,
                             VkExtent3D *extentsOut,
                             uint32_t *layerCountOut);

// Converts a GL mip level to a Vulkan level relative to |baseLevel|.
vk::LevelIndex GetLevelIndex(gl::LevelIndex levelGL, gl::LevelIndex baseLevel);

}  // namespace gl_vk
1065 
// vk_gl: conversions from Vulkan values back to GL/ANGLE equivalents.
namespace vk_gl
{
// The Vulkan back-end will not support a sample count of 1, because of a Vulkan specification
// restriction:
//
//   If the image was created with VkImageCreateInfo::samples equal to VK_SAMPLE_COUNT_1_BIT, the
//   instruction must: have MS = 0.
//
// This restriction was tracked in http://anglebug.com/4196 and Khronos-private Vulkan
// specification issue https://gitlab.khronos.org/vulkan/vulkan/issues/1925.
//
// In addition, the Vulkan back-end will not support sample counts of 32 or 64, since there are no
// standard sample locations for those sample counts.
constexpr unsigned int kSupportedSampleCounts = (VK_SAMPLE_COUNT_2_BIT | VK_SAMPLE_COUNT_4_BIT |
                                                 VK_SAMPLE_COUNT_8_BIT | VK_SAMPLE_COUNT_16_BIT);

// Find set bits in sampleCounts and add the corresponding sample count to the set.
void AddSampleCounts(VkSampleCountFlags sampleCounts, gl::SupportedSampleSet *outSet);
// Return the maximum sample count with a bit set in |sampleCounts|.
GLuint GetMaxSampleCount(VkSampleCountFlags sampleCounts);
// Return a supported sample count that's at least as large as the requested one.
GLuint GetSampleCount(VkSampleCountFlags supportedCounts, GLuint requestedCount);

// Inverse of gl_vk::GetLevelIndex: converts a Vulkan (base-relative) mip level
// back to a GL mip level.
gl::LevelIndex GetLevelIndex(vk::LevelIndex levelVk, gl::LevelIndex baseLevel);
}  // namespace vk_gl
1091 
// The reason an open render pass was (implicitly) closed.  NOTE(review): the
// consumers of these values are outside this header — presumably used for
// logging/statistics; confirm at the call sites.  Enumerator order is part of
// the enum's values; do not reorder.
enum class RenderPassClosureReason
{
    // Don't specify the reason (it should already be specified elsewhere)
    AlreadySpecifiedElsewhere,

    // Implicit closures due to flush/wait/etc.
    ContextDestruction,
    ContextChange,
    GLFlush,
    GLFinish,
    EGLSwapBuffers,
    EGLWaitClient,

    // Closure due to switching rendering to another framebuffer.
    FramebufferBindingChange,
    FramebufferChange,
    NewRenderPass,

    // Incompatible use of resource in the same render pass
    BufferUseThenXfbWrite,
    XfbWriteThenVertexIndexBuffer,
    XfbWriteThenIndirectDrawBuffer,
    XfbResumeAfterDrawBasedClear,
    DepthStencilUseInFeedbackLoop,
    DepthStencilWriteAfterFeedbackLoop,
    PipelineBindWhileXfbActive,

    // Use of resource after render pass
    BufferWriteThenMap,
    BufferUseThenOutOfRPRead,
    BufferUseThenOutOfRPWrite,
    ImageUseThenOutOfRPRead,
    ImageUseThenOutOfRPWrite,
    XfbWriteThenComputeRead,
    XfbWriteThenIndirectDispatchBuffer,
    ImageAttachmentThenComputeRead,
    GetQueryResult,
    BeginNonRenderPassQuery,
    EndNonRenderPassQuery,
    TimestampQuery,
    GLReadPixels,

    // Synchronization
    BufferUseThenReleaseToExternal,
    ImageUseThenReleaseToExternal,
    BufferInUseWhenSynchronizedMap,
    ImageOrphan,
    GLMemoryBarrierThenStorageResource,
    StorageResourceUseThenGLMemoryBarrier,
    ExternalSemaphoreSignal,
    SyncObjectInit,
    SyncObjectWithFdInit,
    SyncObjectClientWait,
    SyncObjectServerWait,

    // Closures that ANGLE could have avoided, but doesn't for simplicity or optimization of more
    // common cases.
    XfbPause,
    FramebufferFetchEmulation,
    ColorBufferInvalidate,
    GenerateMipmapOnCPU,
    CopyTextureOnCPU,
    TextureReformatToRenderable,
    DeviceLocalBufferMap,

    // UtilsVk
    PrepareForBlit,
    PrepareForImageCopy,
    TemporaryForImageClear,
    TemporaryForImageCopy,

    // Misc
    OverlayFontCreation,

    InvalidEnum,
    EnumCount = InvalidEnum,
};
1169 
1170 }  // namespace rx
1171 
// Evaluates a Vulkan |command|; on any result other than VK_SUCCESS, reports
// the error through |context|->handleError() and returns angle::Result::Stop
// from the enclosing function.  do/while(0) makes it a single statement.
// (Comments cannot go inside the macro: // would eat the line-continuation \.)
#define ANGLE_VK_TRY(context, command)                                                   \
    do                                                                                   \
    {                                                                                    \
        auto ANGLE_LOCAL_VAR = command;                                                  \
        if (ANGLE_UNLIKELY(ANGLE_LOCAL_VAR != VK_SUCCESS))                               \
        {                                                                                \
            (context)->handleError(ANGLE_LOCAL_VAR, __FILE__, ANGLE_FUNCTION, __LINE__); \
            return angle::Result::Stop;                                                  \
        }                                                                                \
    } while (0)
1182 
// Maps a boolean |test| onto ANGLE_VK_TRY: true is VK_SUCCESS, false reports
// the given VkResult |error| and returns angle::Result::Stop.
#define ANGLE_VK_CHECK(context, test, error) ANGLE_VK_TRY(context, test ? VK_SUCCESS : error)

// Failed arithmetic/math checks surface as VK_ERROR_VALIDATION_FAILED_EXT.
#define ANGLE_VK_CHECK_MATH(context, result) \
    ANGLE_VK_CHECK(context, result, VK_ERROR_VALIDATION_FAILED_EXT)

// Failed allocations surface as VK_ERROR_OUT_OF_HOST_MEMORY.
#define ANGLE_VK_CHECK_ALLOC(context, result) \
    ANGLE_VK_CHECK(context, result, VK_ERROR_OUT_OF_HOST_MEMORY)
1190 
// Asserts (via UNREACHABLE) and reports VK_ERROR_FEATURE_NOT_PRESENT through
// the context, returning angle::Result::Stop from the enclosing function.
// Wrapped in do/while(0) so the macro is a single statement: the previous
// two-statement expansion would run the ANGLE_VK_CHECK unconditionally when
// used in an unbraced `if`.
#define ANGLE_VK_UNREACHABLE(context)                                 \
    do                                                                \
    {                                                                 \
        UNREACHABLE();                                                \
        ANGLE_VK_CHECK(context, false, VK_ERROR_FEATURE_NOT_PRESENT); \
    } while (0)
1194 
// NVIDIA packs its 32-bit driver version with a 10.8.8.6 bit layout:
//   bits [31:22] major, [21:14] minor, [13:6] sub-minor, [5:0] patch.
#define ANGLE_VK_VERSION_MAJOR_NVIDIA(version) ((((uint32_t)(version)) >> 22) & 0x3FFu)
#define ANGLE_VK_VERSION_MINOR_NVIDIA(version) ((((uint32_t)(version)) >> 14) & 0xFFu)
#define ANGLE_VK_VERSION_SUB_MINOR_NVIDIA(version) ((((uint32_t)(version)) >> 6) & 0xFFu)
#define ANGLE_VK_VERSION_PATCH_NVIDIA(version) (((uint32_t)(version)) & 0x3Fu)
1204 
// Intel on Windows splits the 32-bit driver version into an 18.14 layout:
//   bits [31:14] major, [13:0] minor.
#define ANGLE_VK_VERSION_MAJOR_WIN_INTEL(version) ((((uint32_t)(version)) >> 14) & 0x3FFFFu)
#define ANGLE_VK_VERSION_MINOR_WIN_INTEL(version) (((uint32_t)(version)) & 0x3FFFu)
1210 
1211 #endif  // LIBANGLE_RENDERER_VULKAN_VK_UTILS_H_
1212