//
// Copyright 2016 The ANGLE Project Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// vk_utils:
//    Helper functions for the Vulkan Renderer.
//

#ifndef LIBANGLE_RENDERER_VULKAN_VK_UTILS_H_
#define LIBANGLE_RENDERER_VULKAN_VK_UTILS_H_

#include <atomic>
#include <limits>

#include "GLSLANG/ShaderLang.h"
#include "common/FixedVector.h"
#include "common/Optional.h"
#include "common/PackedEnums.h"
#include "common/debug.h"
#include "libANGLE/Error.h"
#include "libANGLE/Observer.h"
#include "libANGLE/renderer/serial_utils.h"
#include "libANGLE/renderer/vulkan/SecondaryCommandBuffer.h"
#include "libANGLE/renderer/vulkan/vk_wrapper.h"
#include "vulkan/vulkan_fuchsia_ext.h"

#define ANGLE_GL_OBJECTS_X(PROC) \
    PROC(Buffer)                 \
    PROC(Context)                \
    PROC(Framebuffer)            \
    PROC(MemoryObject)           \
    PROC(Overlay)                \
    PROC(Program)                \
    PROC(ProgramPipeline)        \
    PROC(Query)                  \
    PROC(Renderbuffer)           \
    PROC(Sampler)                \
    PROC(Semaphore)              \
    PROC(Texture)                \
    PROC(TransformFeedback)      \
    PROC(VertexArray)

#define ANGLE_PRE_DECLARE_OBJECT(OBJ) class OBJ;

namespace egl
{
class Display;
class Image;
class ShareGroup;
}  // namespace egl

namespace gl
{
struct Box;
class MockOverlay;
struct Extents;
struct RasterizerState;
struct Rectangle;
class State;
struct SwizzleState;
struct VertexAttribute;
class VertexBinding;

ANGLE_GL_OBJECTS_X(ANGLE_PRE_DECLARE_OBJECT)
}  // namespace gl

#define ANGLE_PRE_DECLARE_VK_OBJECT(OBJ) class OBJ##Vk;

namespace rx
{
class DisplayVk;
class ImageVk;
class ProgramExecutableVk;
class RenderbufferVk;
class RenderTargetVk;
class RendererVk;
class RenderPassCache;
class ShareGroupVk;
}  // namespace rx

namespace angle
{
egl::Error ToEGL(Result result, rx::DisplayVk *displayVk, EGLint errorCode);
}  // namespace angle

namespace rx
{
ANGLE_GL_OBJECTS_X(ANGLE_PRE_DECLARE_VK_OBJECT)

const char *VulkanResultString(VkResult result);

constexpr size_t kMaxVulkanLayers = 20;
using VulkanLayerVector = angle::FixedVector<const char *, kMaxVulkanLayers>;

// Verify that validation layers are available.
bool GetAvailableValidationLayers(const std::vector<VkLayerProperties> &layerProps,
                                  bool mustHaveLayers,
                                  VulkanLayerVector *enabledLayerNames);

enum class TextureDimension
{
    TEX_2D,
    TEX_CUBE,
    TEX_3D,
    TEX_2D_ARRAY,
};

// A maximum offset of 4096 covers almost every Vulkan driver on desktop (80%) and mobile (99%).
// The next highest values to meet native drivers are 16 bits or 32 bits.
constexpr uint32_t kAttributeOffsetMaxBits = 15;

namespace vk
{
struct Format;

// A packed attachment index, used to interface packed attachment arrays with the Vulkan API.
class PackedAttachmentIndex final
{
  public:
    explicit constexpr PackedAttachmentIndex(uint32_t index) : mAttachmentIndex(index) {}
    constexpr PackedAttachmentIndex(const PackedAttachmentIndex &other) = default;
    constexpr PackedAttachmentIndex &operator=(const PackedAttachmentIndex &other) = default;

    constexpr uint32_t get() const { return mAttachmentIndex; }
    PackedAttachmentIndex &operator++()
    {
        ++mAttachmentIndex;
        return *this;
    }
    constexpr bool operator==(const PackedAttachmentIndex &other) const
    {
        return mAttachmentIndex == other.mAttachmentIndex;
    }
    constexpr bool operator!=(const PackedAttachmentIndex &other) const
    {
        return mAttachmentIndex != other.mAttachmentIndex;
    }
    constexpr bool operator<(const PackedAttachmentIndex &other) const
    {
        return mAttachmentIndex < other.mAttachmentIndex;
    }

  private:
    uint32_t mAttachmentIndex;
};
using PackedAttachmentCount = PackedAttachmentIndex;
static constexpr PackedAttachmentIndex kAttachmentIndexInvalid = PackedAttachmentIndex(-1);
static constexpr PackedAttachmentIndex kAttachmentIndexZero    = PackedAttachmentIndex(0);
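
// Illustrative sketch (not part of the API): iterating packed attachments with
// PackedAttachmentIndex. |attachmentCount| is a hypothetical PackedAttachmentCount.
//
//     for (PackedAttachmentIndex index = kAttachmentIndexZero; index < attachmentCount; ++index)
//     {
//         uint32_t vkAttachmentIndex = index.get();
//         // ... address the Vulkan attachment at vkAttachmentIndex ...
//     }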

// Prepend |ptr| to the pNext chain at |chainStart|.
template <typename VulkanStruct1, typename VulkanStruct2>
void AddToPNextChain(VulkanStruct1 *chainStart, VulkanStruct2 *ptr)
{
    ASSERT(ptr->pNext == nullptr);

    VkBaseOutStructure *localPtr = reinterpret_cast<VkBaseOutStructure *>(chainStart);
    ptr->pNext                   = localPtr->pNext;
    localPtr->pNext              = reinterpret_cast<VkBaseOutStructure *>(ptr);
}
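
// Illustrative sketch: extending a Vulkan query with an extension struct via AddToPNextChain.
// The types below are standard Vulkan; |physicalDevice| is an assumption for the example.
//
//     VkPhysicalDeviceFeatures2 features = {};
//     features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
//     VkPhysicalDeviceProtectedMemoryFeatures protectedMemoryFeatures = {};
//     protectedMemoryFeatures.sType =
//         VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES;
//     AddToPNextChain(&features, &protectedMemoryFeatures);
//     vkGetPhysicalDeviceFeatures2(physicalDevice, &features);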

struct Error
{
    VkResult errorCode;
    const char *file;
    const char *function;
    uint32_t line;
};

// Abstracts error handling. Implemented by both ContextVk for GL and DisplayVk for EGL errors.
class Context : angle::NonCopyable
{
  public:
    Context(RendererVk *renderer);
    virtual ~Context();

    virtual void handleError(VkResult result,
                             const char *file,
                             const char *function,
                             unsigned int line) = 0;
    VkDevice getDevice() const;
    RendererVk *getRenderer() const { return mRenderer; }

  protected:
    RendererVk *const mRenderer;
};

#if ANGLE_USE_CUSTOM_VULKAN_CMD_BUFFERS
using CommandBuffer = priv::SecondaryCommandBuffer;
#else
using CommandBuffer = priv::CommandBuffer;
#endif

using PrimaryCommandBuffer = priv::CommandBuffer;

VkImageAspectFlags GetDepthStencilAspectFlags(const angle::Format &format);
VkImageAspectFlags GetFormatAspectFlags(const angle::Format &format);

template <typename T>
struct ImplTypeHelper;

// clang-format off
#define ANGLE_IMPL_TYPE_HELPER_GL(OBJ) \
template<>                             \
struct ImplTypeHelper<gl::OBJ>         \
{                                      \
    using ImplType = OBJ##Vk;          \
};
// clang-format on

ANGLE_GL_OBJECTS_X(ANGLE_IMPL_TYPE_HELPER_GL)

template <>
struct ImplTypeHelper<gl::MockOverlay>
{
    using ImplType = OverlayVk;
};

template <>
struct ImplTypeHelper<egl::Display>
{
    using ImplType = DisplayVk;
};

template <>
struct ImplTypeHelper<egl::Image>
{
    using ImplType = ImageVk;
};

template <>
struct ImplTypeHelper<egl::ShareGroup>
{
    using ImplType = ShareGroupVk;
};

template <typename T>
using GetImplType = typename ImplTypeHelper<T>::ImplType;

template <typename T>
GetImplType<T> *GetImpl(const T *glObject)
{
    return GetImplAs<GetImplType<T>>(glObject);
}

template <>
inline OverlayVk *GetImpl(const gl::MockOverlay *glObject)
{
    return nullptr;
}
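
// Illustrative sketch: GetImpl maps a front-end object to its Vulkan back-end implementation,
// e.g. a gl::Texture to a TextureVk. |texture| is assumed to be a valid front-end pointer.
//
//     TextureVk *textureVk = GetImpl(texture);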

template <typename ObjT>
class ObjectAndSerial final : angle::NonCopyable
{
  public:
    ObjectAndSerial() {}

    ObjectAndSerial(ObjT &&object, Serial serial) : mObject(std::move(object)), mSerial(serial) {}

    ObjectAndSerial(ObjectAndSerial &&other)
        : mObject(std::move(other.mObject)), mSerial(std::move(other.mSerial))
    {}
    ObjectAndSerial &operator=(ObjectAndSerial &&other)
    {
        mObject = std::move(other.mObject);
        mSerial = std::move(other.mSerial);
        return *this;
    }

    Serial getSerial() const { return mSerial; }
    void updateSerial(Serial newSerial) { mSerial = newSerial; }

    const ObjT &get() const { return mObject; }
    ObjT &get() { return mObject; }

    bool valid() const { return mObject.valid(); }

    void destroy(VkDevice device)
    {
        mObject.destroy(device);
        mSerial = Serial();
    }

  private:
    ObjT mObject;
    Serial mSerial;
};

// Reference to a deleted object. The object is due to be destroyed at some point in the future.
// |mHandleType| determines the type of the object and which destroy function should be called.
class GarbageObject
{
  public:
    GarbageObject();
    GarbageObject(GarbageObject &&other);
    GarbageObject &operator=(GarbageObject &&rhs);

    bool valid() const { return mHandle != VK_NULL_HANDLE; }
    void destroy(RendererVk *renderer);

    template <typename DerivedT, typename HandleT>
    static GarbageObject Get(WrappedObject<DerivedT, HandleT> *object)
    {
        // Using c-style cast here to avoid conditional compile for MSVC 32-bit
        // which fails to compile with reinterpret_cast, requiring static_cast.
        return GarbageObject(HandleTypeHelper<DerivedT>::kHandleType,
                             (GarbageHandle)(object->release()));
    }

  private:
    VK_DEFINE_NON_DISPATCHABLE_HANDLE(GarbageHandle)
    GarbageObject(HandleType handleType, GarbageHandle handle);

    HandleType mHandleType;
    GarbageHandle mHandle;
};

template <typename T>
GarbageObject GetGarbage(T *obj)
{
    return GarbageObject::Get(obj);
}

// A list of garbage objects. Has no object lifetime information.
using GarbageList = std::vector<GarbageObject>;

// A list of garbage objects and the associated serial after which the objects can be destroyed.
using GarbageAndSerial = ObjectAndSerial<GarbageList>;

// Houses multiple lists of garbage objects. Each sub-list has a different lifetime. They should be
// sorted such that later-living garbage is ordered later in the list.
using GarbageQueue = std::vector<GarbageAndSerial>;
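
// Illustrative sketch of the deferred-destruction flow these types support: garbage is batched
// with the serial of the submission that last uses it, and a batch is destroyed only once the
// GPU has passed that serial. |buffer|, |submitSerial|, and |lastCompletedSerial| are
// assumptions for the example.
//
//     GarbageList garbage;
//     garbage.emplace_back(GetGarbage(&buffer));
//     garbageQueue.emplace_back(std::move(garbage), submitSerial);
//     // ... later, after checking completed work:
//     while (!garbageQueue.empty() && garbageQueue.front().getSerial() <= lastCompletedSerial)
//     {
//         // destroy each GarbageObject in the front batch, then pop it
//     }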

class MemoryProperties final : angle::NonCopyable
{
  public:
    MemoryProperties();

    void init(VkPhysicalDevice physicalDevice);
    bool hasLazilyAllocatedMemory() const;
    angle::Result findCompatibleMemoryIndex(Context *context,
                                            const VkMemoryRequirements &memoryRequirements,
                                            VkMemoryPropertyFlags requestedMemoryPropertyFlags,
                                            bool isExternalMemory,
                                            VkMemoryPropertyFlags *memoryPropertyFlagsOut,
                                            uint32_t *indexOut) const;
    void destroy();

    VkDeviceSize getHeapSizeForMemoryType(uint32_t memoryType) const
    {
        uint32_t heapIndex = mMemoryProperties.memoryTypes[memoryType].heapIndex;
        return mMemoryProperties.memoryHeaps[heapIndex].size;
    }

  private:
    VkPhysicalDeviceMemoryProperties mMemoryProperties;
};

// Similar to StagingImage, for Buffers.
class StagingBuffer final : angle::NonCopyable
{
  public:
    StagingBuffer();
    void release(ContextVk *contextVk);
    void collectGarbage(RendererVk *renderer, Serial serial);
    void destroy(RendererVk *renderer);

    angle::Result init(Context *context, VkDeviceSize size, StagingUsage usage);

    Buffer &getBuffer() { return mBuffer; }
    const Buffer &getBuffer() const { return mBuffer; }
    size_t getSize() const { return mSize; }

  private:
    Buffer mBuffer;
    Allocation mAllocation;
    size_t mSize;
};

angle::Result InitMappableAllocation(Context *context,
                                     const Allocator &allocator,
                                     Allocation *allocation,
                                     VkDeviceSize size,
                                     int value,
                                     VkMemoryPropertyFlags memoryPropertyFlags);

angle::Result InitMappableDeviceMemory(Context *context,
                                       DeviceMemory *deviceMemory,
                                       VkDeviceSize size,
                                       int value,
                                       VkMemoryPropertyFlags memoryPropertyFlags);

angle::Result AllocateBufferMemory(Context *context,
                                   VkMemoryPropertyFlags requestedMemoryPropertyFlags,
                                   VkMemoryPropertyFlags *memoryPropertyFlagsOut,
                                   const void *extraAllocationInfo,
                                   Buffer *buffer,
                                   DeviceMemory *deviceMemoryOut,
                                   VkDeviceSize *sizeOut);

angle::Result AllocateImageMemory(Context *context,
                                  VkMemoryPropertyFlags memoryPropertyFlags,
                                  VkMemoryPropertyFlags *memoryPropertyFlagsOut,
                                  const void *extraAllocationInfo,
                                  Image *image,
                                  DeviceMemory *deviceMemoryOut,
                                  VkDeviceSize *sizeOut);

angle::Result AllocateImageMemoryWithRequirements(Context *context,
                                                  VkMemoryPropertyFlags memoryPropertyFlags,
                                                  const VkMemoryRequirements &memoryRequirements,
                                                  const void *extraAllocationInfo,
                                                  Image *image,
                                                  DeviceMemory *deviceMemoryOut);

angle::Result AllocateBufferMemoryWithRequirements(Context *context,
                                                   VkMemoryPropertyFlags memoryPropertyFlags,
                                                   const VkMemoryRequirements &memoryRequirements,
                                                   const void *extraAllocationInfo,
                                                   Buffer *buffer,
                                                   VkMemoryPropertyFlags *memoryPropertyFlagsOut,
                                                   DeviceMemory *deviceMemoryOut);

using ShaderAndSerial = ObjectAndSerial<ShaderModule>;

angle::Result InitShaderAndSerial(Context *context,
                                  ShaderAndSerial *shaderAndSerial,
                                  const uint32_t *shaderCode,
                                  size_t shaderCodeSize);

gl::TextureType Get2DTextureType(uint32_t layerCount, GLint samples);

enum class RecordingMode
{
    Start,
    Append,
};

// Helper class to handle RAII patterns for initialization. Requires that T have a destroy method
// that takes a VkDevice and returns void.
template <typename T>
class DeviceScoped final : angle::NonCopyable
{
  public:
    DeviceScoped(VkDevice device) : mDevice(device) {}
    ~DeviceScoped() { mVar.destroy(mDevice); }

    const T &get() const { return mVar; }
    T &get() { return mVar; }

    T &&release() { return std::move(mVar); }

  private:
    VkDevice mDevice;
    T mVar;
};
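
// Illustrative sketch: initializing an object under DeviceScoped so it is destroyed
// automatically on early error returns, and released into its real owner on success.
// |context|, |device|, and |createInfo| are assumptions for the example.
//
//     DeviceScoped<Buffer> scopedBuffer(device);
//     ANGLE_VK_TRY(context, scopedBuffer.get().init(device, createInfo));
//     mBuffer = scopedBuffer.release();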

// Similar to DeviceScoped, but releases objects instead of destroying them. Requires that T have a
// release method that takes a ContextVk * and returns void.
template <typename T>
class ContextScoped final : angle::NonCopyable
{
  public:
    ContextScoped(ContextVk *contextVk) : mContextVk(contextVk) {}
    ~ContextScoped() { mVar.release(mContextVk); }

    const T &get() const { return mVar; }
    T &get() { return mVar; }

    T &&release() { return std::move(mVar); }

  private:
    ContextVk *mContextVk;
    T mVar;
};

template <typename T>
class RendererScoped final : angle::NonCopyable
{
  public:
    RendererScoped(RendererVk *renderer) : mRenderer(renderer) {}
    ~RendererScoped() { mVar.release(mRenderer); }

    const T &get() const { return mVar; }
    T &get() { return mVar; }

    T &&release() { return std::move(mVar); }

  private:
    RendererVk *mRenderer;
    T mVar;
};

// This is a very simple RefCount class that has no autoreleasing. Used in the descriptor set and
// pipeline layout caches.
template <typename T>
class RefCounted : angle::NonCopyable
{
  public:
    RefCounted() : mRefCount(0) {}
    explicit RefCounted(T &&newObject) : mRefCount(0), mObject(std::move(newObject)) {}
    ~RefCounted() { ASSERT(mRefCount == 0 && !mObject.valid()); }

    RefCounted(RefCounted &&copy) : mRefCount(copy.mRefCount), mObject(std::move(copy.mObject))
    {
        ASSERT(this != &copy);
        copy.mRefCount = 0;
    }

    RefCounted &operator=(RefCounted &&rhs)
    {
        std::swap(mRefCount, rhs.mRefCount);
        mObject = std::move(rhs.mObject);
        return *this;
    }

    void addRef()
    {
        ASSERT(mRefCount != std::numeric_limits<uint32_t>::max());
        mRefCount++;
    }

    void releaseRef()
    {
        ASSERT(isReferenced());
        mRefCount--;
    }

    bool isReferenced() const { return mRefCount != 0; }

    T &get() { return mObject; }
    const T &get() const { return mObject; }

    // A debug function, used in assertions, to validate that the reference count is as expected.
    bool isRefCountAsExpected(uint32_t expectedRefCount) { return mRefCount == expectedRefCount; }

  private:
    uint32_t mRefCount;
    T mObject;
};

template <typename T>
class BindingPointer final : angle::NonCopyable
{
  public:
    BindingPointer() = default;
    ~BindingPointer() { reset(); }

    BindingPointer(BindingPointer &&other)
    {
        set(other.mRefCounted);
        other.reset();
    }

    void set(RefCounted<T> *refCounted)
    {
        if (mRefCounted)
        {
            mRefCounted->releaseRef();
        }

        mRefCounted = refCounted;

        if (mRefCounted)
        {
            mRefCounted->addRef();
        }
    }

    void reset() { set(nullptr); }

    T &get() { return mRefCounted->get(); }
    const T &get() const { return mRefCounted->get(); }

    bool valid() const { return mRefCounted != nullptr; }

  private:
    RefCounted<T> *mRefCounted = nullptr;
};
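
// Illustrative sketch: RefCounted entries are owned by a cache, and a BindingPointer holds a
// reference for as long as the binding lives. |entry| and useLayout() are hypothetical.
//
//     RefCounted<DescriptorSetLayout> *entry = /* owned by a cache */;
//     BindingPointer<DescriptorSetLayout> binding;
//     binding.set(entry);   // addRef on the cache entry
//     useLayout(binding.get());
//     binding.reset();      // releaseRef; the cache still owns the entry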

// Helper class to share ref-counted Vulkan objects. Requires that T have a destroy method
// that takes a VkDevice and returns void.
template <typename T>
class Shared final : angle::NonCopyable
{
  public:
    Shared() : mRefCounted(nullptr) {}
    ~Shared() { ASSERT(mRefCounted == nullptr); }

    Shared(Shared &&other) { *this = std::move(other); }
    Shared &operator=(Shared &&other)
    {
        ASSERT(this != &other);
        mRefCounted       = other.mRefCounted;
        other.mRefCounted = nullptr;
        return *this;
    }

    void set(VkDevice device, RefCounted<T> *refCounted)
    {
        if (mRefCounted)
        {
            mRefCounted->releaseRef();
            if (!mRefCounted->isReferenced())
            {
                mRefCounted->get().destroy(device);
                SafeDelete(mRefCounted);
            }
        }

        mRefCounted = refCounted;

        if (mRefCounted)
        {
            mRefCounted->addRef();
        }
    }

    void setUnreferenced(RefCounted<T> *refCounted)
    {
        ASSERT(!mRefCounted);
        ASSERT(refCounted);

        mRefCounted = refCounted;
        mRefCounted->addRef();
    }

    void assign(VkDevice device, T &&newObject)
    {
        set(device, new RefCounted<T>(std::move(newObject)));
    }

    void copy(VkDevice device, const Shared<T> &other) { set(device, other.mRefCounted); }

    void copyUnreferenced(const Shared<T> &other) { setUnreferenced(other.mRefCounted); }

    void reset(VkDevice device) { set(device, nullptr); }

    template <typename RecyclerT>
    void resetAndRecycle(RecyclerT *recycler)
    {
        if (mRefCounted)
        {
            mRefCounted->releaseRef();
            if (!mRefCounted->isReferenced())
            {
                ASSERT(mRefCounted->get().valid());
                recycler->recycle(std::move(mRefCounted->get()));
                SafeDelete(mRefCounted);
            }

            mRefCounted = nullptr;
        }
    }

    template <typename OnRelease>
    void resetAndRelease(OnRelease *onRelease)
    {
        if (mRefCounted)
        {
            mRefCounted->releaseRef();
            if (!mRefCounted->isReferenced())
            {
                ASSERT(mRefCounted->get().valid());
                (*onRelease)(std::move(mRefCounted->get()));
                SafeDelete(mRefCounted);
            }

            mRefCounted = nullptr;
        }
    }

    bool isReferenced() const
    {
        // If the reference count is zero, the object should have been deleted; i.e. if the
        // object is not nullptr, it should have a reference.
        ASSERT(!mRefCounted || mRefCounted->isReferenced());
        return mRefCounted != nullptr;
    }

    T &get()
    {
        ASSERT(mRefCounted && mRefCounted->isReferenced());
        return mRefCounted->get();
    }
    const T &get() const
    {
        ASSERT(mRefCounted && mRefCounted->isReferenced());
        return mRefCounted->get();
    }

  private:
    RefCounted<T> *mRefCounted;
};
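
// Illustrative sketch: several Shared<T> handles can reference one object; the last one to be
// reset destroys it. |device| and |fence| are assumptions for the example.
//
//     Shared<Fence> first;
//     first.assign(device, std::move(fence));  // refcount 1
//     Shared<Fence> second;
//     second.copy(device, first);              // refcount 2
//     first.reset(device);                     // refcount 1
//     second.reset(device);                    // refcount 0; the fence is destroyed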

template <typename T>
class Recycler final : angle::NonCopyable
{
  public:
    Recycler() = default;

    void recycle(T &&garbageObject) { mObjectFreeList.emplace_back(std::move(garbageObject)); }

    void fetch(T *outObject)
    {
        ASSERT(!empty());
        *outObject = std::move(mObjectFreeList.back());
        mObjectFreeList.pop_back();
    }

    void destroy(VkDevice device)
    {
        for (T &object : mObjectFreeList)
        {
            object.destroy(device);
        }
    }

    bool empty() const { return mObjectFreeList.empty(); }

  private:
    std::vector<T> mObjectFreeList;
};
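
// Illustrative sketch: fetching from a Recycler avoids re-creating objects. |context| and
// |device| are assumptions for the example.
//
//     Recycler<Semaphore> recycler;
//     Semaphore semaphore;
//     if (!recycler.empty())
//     {
//         recycler.fetch(&semaphore);
//     }
//     else
//     {
//         ANGLE_VK_TRY(context, semaphore.init(device));
//     }
//     // ... use the semaphore, then return it to the free list:
//     recycler.recycle(std::move(semaphore));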

ANGLE_ENABLE_STRUCT_PADDING_WARNINGS
struct SpecializationConstants final
{
    VkBool32 lineRasterEmulation;
    uint32_t surfaceRotation;
    float drawableWidth;
    float drawableHeight;
};
ANGLE_DISABLE_STRUCT_PADDING_WARNINGS

template <typename T>
using SpecializationConstantMap = angle::PackedEnumMap<sh::vk::SpecializationConstantId, T>;

void MakeDebugUtilsLabel(GLenum source, const char *marker, VkDebugUtilsLabelEXT *label);

constexpr size_t kUnpackedDepthIndex   = gl::IMPLEMENTATION_MAX_DRAW_BUFFERS;
constexpr size_t kUnpackedStencilIndex = gl::IMPLEMENTATION_MAX_DRAW_BUFFERS + 1;

class ClearValuesArray final
{
  public:
    ClearValuesArray();
    ~ClearValuesArray();

    ClearValuesArray(const ClearValuesArray &other);
    ClearValuesArray &operator=(const ClearValuesArray &rhs);

    void store(uint32_t index, VkImageAspectFlags aspectFlags, const VkClearValue &clearValue);
    void storeNoDepthStencil(uint32_t index, const VkClearValue &clearValue);

    void reset(size_t index)
    {
        mValues[index] = {};
        mEnabled.reset(index);
    }

    bool test(size_t index) const { return mEnabled.test(index); }
    bool testDepth() const { return mEnabled.test(kUnpackedDepthIndex); }
    bool testStencil() const { return mEnabled.test(kUnpackedStencilIndex); }
    gl::DrawBufferMask getColorMask() const;

    const VkClearValue &operator[](size_t index) const { return mValues[index]; }

    float getDepthValue() const { return mValues[kUnpackedDepthIndex].depthStencil.depth; }
    uint32_t getStencilValue() const { return mValues[kUnpackedStencilIndex].depthStencil.stencil; }

    const VkClearValue *data() const { return mValues.data(); }
    bool empty() const { return mEnabled.none(); }
    bool any() const { return mEnabled.any(); }

  private:
    gl::AttachmentArray<VkClearValue> mValues;
    gl::AttachmentsMask mEnabled;
};

// Defines Serials for Vulkan objects.
#define ANGLE_VK_SERIAL_OP(X) \
    X(Buffer)                 \
    X(Image)                  \
    X(ImageOrBufferView)      \
    X(Sampler)

#define ANGLE_DEFINE_VK_SERIAL_TYPE(Type)                                     \
    class Type##Serial                                                        \
    {                                                                         \
      public:                                                                 \
        constexpr Type##Serial() : mSerial(kInvalid) {}                       \
        constexpr explicit Type##Serial(uint32_t serial) : mSerial(serial) {} \
                                                                              \
        constexpr bool operator==(const Type##Serial &other) const            \
        {                                                                     \
            ASSERT(mSerial != kInvalid);                                      \
            ASSERT(other.mSerial != kInvalid);                                \
            return mSerial == other.mSerial;                                  \
        }                                                                     \
        constexpr bool operator!=(const Type##Serial &other) const            \
        {                                                                     \
            ASSERT(mSerial != kInvalid);                                      \
            ASSERT(other.mSerial != kInvalid);                                \
            return mSerial != other.mSerial;                                  \
        }                                                                     \
        constexpr uint32_t getValue() const { return mSerial; }               \
        constexpr bool valid() const { return mSerial != kInvalid; }          \
                                                                              \
      private:                                                                \
        uint32_t mSerial;                                                     \
        static constexpr uint32_t kInvalid = 0;                               \
    };                                                                        \
    static constexpr Type##Serial kInvalid##Type##Serial = Type##Serial();

ANGLE_VK_SERIAL_OP(ANGLE_DEFINE_VK_SERIAL_TYPE)

#define ANGLE_DECLARE_GEN_VK_SERIAL(Type) Type##Serial generate##Type##Serial();

class ResourceSerialFactory final : angle::NonCopyable
{
  public:
    ResourceSerialFactory();
    ~ResourceSerialFactory();

    ANGLE_VK_SERIAL_OP(ANGLE_DECLARE_GEN_VK_SERIAL)

  private:
    uint32_t issueSerial();

    // Kept atomic so it can be accessed from multiple Context threads at once.
    std::atomic<uint32_t> mCurrentUniqueSerial;
};

#if defined(ANGLE_ENABLE_PERF_COUNTER_OUTPUT)
constexpr bool kOutputCumulativePerfCounters = ANGLE_ENABLE_PERF_COUNTER_OUTPUT;
#else
constexpr bool kOutputCumulativePerfCounters = false;
#endif

// Performance and resource counters.
struct RenderPassPerfCounters
{
    // load/storeOps. Includes ops for the resolve attachment. Maximum value = 2.
    uint8_t depthClears;
    uint8_t depthLoads;
    uint8_t depthStores;
    uint8_t stencilClears;
    uint8_t stencilLoads;
    uint8_t stencilStores;
    // Number of unresolve and resolve operations. Maximum value for color =
    // gl::IMPLEMENTATION_MAX_DRAW_BUFFERS and for depth/stencil = 1 each.
    uint8_t colorAttachmentUnresolves;
    uint8_t colorAttachmentResolves;
    uint8_t depthAttachmentUnresolves;
    uint8_t depthAttachmentResolves;
    uint8_t stencilAttachmentUnresolves;
    uint8_t stencilAttachmentResolves;
    // Whether the depth/stencil attachment is using a read-only layout.
    uint8_t readOnlyDepthStencil;
};

struct PerfCounters
{
    uint32_t primaryBuffers;
    uint32_t renderPasses;
    uint32_t writeDescriptorSets;
    uint32_t flushedOutsideRenderPassCommandBuffers;
    uint32_t resolveImageCommands;
    uint32_t depthClears;
    uint32_t depthLoads;
    uint32_t depthStores;
    uint32_t stencilClears;
    uint32_t stencilLoads;
    uint32_t stencilStores;
    uint32_t colorAttachmentUnresolves;
    uint32_t depthAttachmentUnresolves;
    uint32_t stencilAttachmentUnresolves;
    uint32_t colorAttachmentResolves;
    uint32_t depthAttachmentResolves;
    uint32_t stencilAttachmentResolves;
    uint32_t readOnlyDepthStencilRenderPasses;
    uint32_t descriptorSetAllocations;
    uint32_t shaderBuffersDescriptorSetCacheHits;
    uint32_t shaderBuffersDescriptorSetCacheMisses;
};

// A Vulkan image level index.
using LevelIndex = gl::LevelIndexWrapper<uint32_t>;

// Ensure the viewport is within Vulkan requirements.
void ClampViewport(VkViewport *viewport);

}  // namespace vk

#if !defined(ANGLE_SHARED_LIBVULKAN)
// Lazily load entry points for each extension as necessary.
void InitDebugUtilsEXTFunctions(VkInstance instance);
void InitDebugReportEXTFunctions(VkInstance instance);
void InitGetPhysicalDeviceProperties2KHRFunctions(VkInstance instance);
void InitTransformFeedbackEXTFunctions(VkDevice device);
void InitSamplerYcbcrKHRFunctions(VkDevice device);
void InitRenderPass2KHRFunctions(VkDevice device);

#    if defined(ANGLE_PLATFORM_FUCHSIA)
// VK_FUCHSIA_imagepipe_surface
void InitImagePipeSurfaceFUCHSIAFunctions(VkInstance instance);
#    endif

#    if defined(ANGLE_PLATFORM_ANDROID)
// VK_ANDROID_external_memory_android_hardware_buffer
void InitExternalMemoryHardwareBufferANDROIDFunctions(VkInstance instance);
#    endif

#    if defined(ANGLE_PLATFORM_GGP)
// VK_GGP_stream_descriptor_surface
void InitGGPStreamDescriptorSurfaceFunctions(VkInstance instance);
#    endif  // defined(ANGLE_PLATFORM_GGP)

// VK_KHR_external_semaphore_fd
void InitExternalSemaphoreFdFunctions(VkInstance instance);

// VK_EXT_external_memory_host
void InitExternalMemoryHostFunctions(VkInstance instance);

// VK_KHR_external_fence_capabilities
void InitExternalFenceCapabilitiesFunctions(VkInstance instance);

// VK_KHR_get_memory_requirements2
void InitGetMemoryRequirements2KHRFunctions(VkDevice device);

// VK_KHR_bind_memory2
void InitBindMemory2KHRFunctions(VkDevice device);

// VK_KHR_external_fence_fd
void InitExternalFenceFdFunctions(VkInstance instance);

// VK_KHR_external_semaphore_capabilities
void InitExternalSemaphoreCapabilitiesFunctions(VkInstance instance);

#endif  // !defined(ANGLE_SHARED_LIBVULKAN)

GLenum CalculateGenerateMipmapFilter(ContextVk *contextVk, const vk::Format &format);
size_t PackSampleCount(GLint sampleCount);

namespace gl_vk
{
VkRect2D GetRect(const gl::Rectangle &source);
VkFilter GetFilter(const GLenum filter);
VkSamplerMipmapMode GetSamplerMipmapMode(const GLenum filter);
VkSamplerAddressMode GetSamplerAddressMode(const GLenum wrap);
VkPrimitiveTopology GetPrimitiveTopology(gl::PrimitiveMode mode);
VkCullModeFlagBits GetCullMode(const gl::RasterizerState &rasterState);
VkFrontFace GetFrontFace(GLenum frontFace, bool invertCullFace);
VkSampleCountFlagBits GetSamples(GLint sampleCount);
VkComponentSwizzle GetSwizzle(const GLenum swizzle);
VkCompareOp GetCompareOp(const GLenum compareFunc);

constexpr gl::ShaderMap<VkShaderStageFlagBits> kShaderStageMap = {
    {gl::ShaderType::Vertex, VK_SHADER_STAGE_VERTEX_BIT},
    {gl::ShaderType::TessControl, VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT},
    {gl::ShaderType::TessEvaluation, VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT},
    {gl::ShaderType::Fragment, VK_SHADER_STAGE_FRAGMENT_BIT},
    {gl::ShaderType::Geometry, VK_SHADER_STAGE_GEOMETRY_BIT},
    {gl::ShaderType::Compute, VK_SHADER_STAGE_COMPUTE_BIT},
};

void GetOffset(const gl::Offset &glOffset, VkOffset3D *vkOffset);
void GetExtent(const gl::Extents &glExtent, VkExtent3D *vkExtent);
VkImageType GetImageType(gl::TextureType textureType);
VkImageViewType GetImageViewType(gl::TextureType textureType);
VkColorComponentFlags GetColorComponentFlags(bool red, bool green, bool blue, bool alpha);
VkShaderStageFlags GetShaderStageFlags(gl::ShaderBitSet activeShaders);

void GetViewport(const gl::Rectangle &viewport,
                 float nearPlane,
                 float farPlane,
                 bool invertViewport,
                 bool upperLeftOrigin,
                 GLint renderAreaHeight,
                 VkViewport *viewportOut);

void GetExtentsAndLayerCount(gl::TextureType textureType,
                             const gl::Extents &extents,
                             VkExtent3D *extentsOut,
                             uint32_t *layerCountOut);

vk::LevelIndex GetLevelIndex(gl::LevelIndex levelGL, gl::LevelIndex baseLevel);
}  // namespace gl_vk

namespace vk_gl
{
// The Vulkan back-end will not support a sample count of 1, because of a Vulkan specification
// restriction:
//
//   If the image was created with VkImageCreateInfo::samples equal to VK_SAMPLE_COUNT_1_BIT, the
//   instruction must have MS = 0.
//
// This restriction was tracked in http://anglebug.com/4196 and the Khronos-private Vulkan
// specification issue https://gitlab.khronos.org/vulkan/vulkan/issues/1925.
//
// In addition, the Vulkan back-end will not support sample counts of 32 or 64, since there are no
// standard sample locations for those sample counts.
constexpr unsigned int kSupportedSampleCounts = (VK_SAMPLE_COUNT_2_BIT | VK_SAMPLE_COUNT_4_BIT |
                                                 VK_SAMPLE_COUNT_8_BIT | VK_SAMPLE_COUNT_16_BIT);

// Find set bits in sampleCounts and add the corresponding sample count to the set.
void AddSampleCounts(VkSampleCountFlags sampleCounts, gl::SupportedSampleSet *outSet);
// Return the maximum sample count with a bit set in |sampleCounts|.
GLuint GetMaxSampleCount(VkSampleCountFlags sampleCounts);
// Return a supported sample count that's at least as large as the requested one.
GLuint GetSampleCount(VkSampleCountFlags supportedCounts, GLuint requestedCount);
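
// Illustrative sketch: rounding a requested count up to a supported one. |limits| is assumed to
// be the device's VkPhysicalDeviceLimits.
//
//     VkSampleCountFlags supported =
//         limits.framebufferColorSampleCounts & kSupportedSampleCounts;
//     GLuint samples = GetSampleCount(supported, 3);  // yields 4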

gl::LevelIndex GetLevelIndex(vk::LevelIndex levelVk, gl::LevelIndex baseLevel);
}  // namespace vk_gl

}  // namespace rx

#define ANGLE_VK_TRY(context, command)                                                   \
    do                                                                                   \
    {                                                                                    \
        auto ANGLE_LOCAL_VAR = command;                                                  \
        if (ANGLE_UNLIKELY(ANGLE_LOCAL_VAR != VK_SUCCESS))                               \
        {                                                                                \
            (context)->handleError(ANGLE_LOCAL_VAR, __FILE__, ANGLE_FUNCTION, __LINE__); \
            return angle::Result::Stop;                                                  \
        }                                                                                \
    } while (0)
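
// Illustrative sketch: ANGLE_VK_TRY routes a failing VkResult into Context::handleError and
// stops the current operation. FooVk, |device|, |createInfo|, and |mFence| are hypothetical.
//
//     angle::Result FooVk::initFence(vk::Context *context)
//     {
//         ANGLE_VK_TRY(context, vkCreateFence(device, &createInfo, nullptr, &mFence));
//         return angle::Result::Continue;
//     }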

#define ANGLE_VK_CHECK(context, test, error) ANGLE_VK_TRY(context, test ? VK_SUCCESS : error)

#define ANGLE_VK_CHECK_MATH(context, result) \
    ANGLE_VK_CHECK(context, result, VK_ERROR_VALIDATION_FAILED_EXT)

#define ANGLE_VK_CHECK_ALLOC(context, result) \
    ANGLE_VK_CHECK(context, result, VK_ERROR_OUT_OF_HOST_MEMORY)

#define ANGLE_VK_UNREACHABLE(context) \
    UNREACHABLE();                    \
    ANGLE_VK_CHECK(context, false, VK_ERROR_FEATURE_NOT_PRESENT)

// NVIDIA packs the driver version into bit fields:
//   Major: 10 bits
//   Minor: 8 bits
//   Sub-minor: 8 bits
//   Patch: 6 bits
#define ANGLE_VK_VERSION_MAJOR_NVIDIA(version) (((uint32_t)(version) >> 22) & 0x3ff)
#define ANGLE_VK_VERSION_MINOR_NVIDIA(version) (((uint32_t)(version) >> 14) & 0xff)
#define ANGLE_VK_VERSION_SUB_MINOR_NVIDIA(version) (((uint32_t)(version) >> 6) & 0xff)
#define ANGLE_VK_VERSION_PATCH_NVIDIA(version) ((uint32_t)(version) & 0x3f)
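
// Illustrative example: for an NVIDIA driver version 470.42, the packed value is
// (470 << 22) | (42 << 14), so:
//
//     ANGLE_VK_VERSION_MAJOR_NVIDIA(version);  // == 470
//     ANGLE_VK_VERSION_MINOR_NVIDIA(version);  // == 42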

// Similarly, Intel on Windows packs the driver version into bit fields:
//   Major: 18 bits
//   Minor: 14 bits
#define ANGLE_VK_VERSION_MAJOR_WIN_INTEL(version) (((uint32_t)(version) >> 14) & 0x3ffff)
#define ANGLE_VK_VERSION_MINOR_WIN_INTEL(version) ((uint32_t)(version) & 0x3fff)

#endif  // LIBANGLE_RENDERER_VULKAN_VK_UTILS_H_