//
// Copyright 2016 The ANGLE Project Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// vk_utils:
//    Helper functions for the Vulkan Renderer.
//

#ifndef LIBANGLE_RENDERER_VULKAN_VK_UTILS_H_
#define LIBANGLE_RENDERER_VULKAN_VK_UTILS_H_

#include <atomic>
#include <limits>
#include <queue>

#include "GLSLANG/ShaderLang.h"
#include "common/FixedVector.h"
#include "common/Optional.h"
#include "common/PackedEnums.h"
#include "common/backtrace_utils.h"
#include "common/debug.h"
#include "libANGLE/Error.h"
#include "libANGLE/Observer.h"
#include "libANGLE/angletypes.h"
#include "libANGLE/renderer/serial_utils.h"
#include "libANGLE/renderer/vulkan/SecondaryCommandBuffer.h"
#include "libANGLE/renderer/vulkan/SecondaryCommandPool.h"
#include "libANGLE/renderer/vulkan/VulkanSecondaryCommandBuffer.h"
#include "libANGLE/renderer/vulkan/vk_wrapper.h"
#include "platform/autogen/FeaturesVk_autogen.h"
#include "vulkan/vulkan_fuchsia_ext.h"

#define ANGLE_GL_OBJECTS_X(PROC) \
    PROC(Buffer)                 \
    PROC(Context)                \
    PROC(Framebuffer)            \
    PROC(MemoryObject)           \
    PROC(Overlay)                \
    PROC(Program)                \
    PROC(ProgramPipeline)        \
    PROC(Query)                  \
    PROC(Renderbuffer)           \
    PROC(Sampler)                \
    PROC(Semaphore)              \
    PROC(Texture)                \
    PROC(TransformFeedback)      \
    PROC(VertexArray)

#define ANGLE_PRE_DECLARE_OBJECT(OBJ) class OBJ;

namespace egl
{
class Display;
class Image;
class ShareGroup;
}  // namespace egl

namespace gl
{
class MockOverlay;
class ProgramExecutable;
struct RasterizerState;
struct SwizzleState;
struct VertexAttribute;
class VertexBinding;

ANGLE_GL_OBJECTS_X(ANGLE_PRE_DECLARE_OBJECT)
}  // namespace gl

#define ANGLE_PRE_DECLARE_VK_OBJECT(OBJ) class OBJ##Vk;

namespace rx
{
class DisplayVk;
class ImageVk;
class ProgramExecutableVk;
class RenderbufferVk;
class RenderTargetVk;
class RendererVk;
class RenderPassCache;
class ShareGroupVk;
}  // namespace rx

namespace angle
{
egl::Error ToEGL(Result result, EGLint errorCode);
}  // namespace angle

namespace rx
{
ANGLE_GL_OBJECTS_X(ANGLE_PRE_DECLARE_VK_OBJECT)

const char *VulkanResultString(VkResult result);

constexpr size_t kMaxVulkanLayers = 20;
using VulkanLayerVector = angle::FixedVector<const char *, kMaxVulkanLayers>;

// Verify that validation layers are available.
bool GetAvailableValidationLayers(const std::vector<VkLayerProperties> &layerProps,
                                  bool mustHaveLayers,
                                  VulkanLayerVector *enabledLayerNames);

enum class TextureDimension
{
    TEX_2D,
    TEX_CUBE,
    TEX_3D,
    TEX_2D_ARRAY,
};

enum class BufferUsageType
{
    Static      = 0,
    Dynamic     = 1,
    InvalidEnum = 2,
    EnumCount   = InvalidEnum,
};

// A maximum offset of 4096 covers almost every Vulkan driver on desktop (80%) and mobile (99%).
// The next highest values to meet native drivers are 16 bits or 32 bits.
constexpr uint32_t kAttributeOffsetMaxBits = 15;
constexpr uint32_t kInvalidMemoryTypeIndex = UINT32_MAX;
constexpr uint32_t kInvalidMemoryHeapIndex = UINT32_MAX;

namespace vk
{

// Used for memory allocation tracking.
enum class MemoryAllocationType;

// A packed attachment index, for interfacing with the Vulkan API.
class PackedAttachmentIndex final
{
  public:
    explicit constexpr PackedAttachmentIndex(uint32_t index) : mAttachmentIndex(index) {}
    constexpr PackedAttachmentIndex(const PackedAttachmentIndex &other)            = default;
    constexpr PackedAttachmentIndex &operator=(const PackedAttachmentIndex &other) = default;

    constexpr uint32_t get() const { return mAttachmentIndex; }
    PackedAttachmentIndex &operator++()
    {
        ++mAttachmentIndex;
        return *this;
    }
    constexpr bool operator==(const PackedAttachmentIndex &other) const
    {
        return mAttachmentIndex == other.mAttachmentIndex;
    }
    constexpr bool operator!=(const PackedAttachmentIndex &other) const
    {
        return mAttachmentIndex != other.mAttachmentIndex;
    }
    constexpr bool operator<(const PackedAttachmentIndex &other) const
    {
        return mAttachmentIndex < other.mAttachmentIndex;
    }

  private:
    uint32_t mAttachmentIndex;
};
using PackedAttachmentCount = PackedAttachmentIndex;
static constexpr PackedAttachmentIndex kAttachmentIndexInvalid = PackedAttachmentIndex(-1);
static constexpr PackedAttachmentIndex kAttachmentIndexZero    = PackedAttachmentIndex(0);

// Prepend |ptr| to the pNext chain at |chainStart|.
template <typename VulkanStruct1, typename VulkanStruct2>
void AddToPNextChain(VulkanStruct1 *chainStart, VulkanStruct2 *ptr)
{
    ASSERT(ptr->pNext == nullptr);

    VkBaseOutStructure *localPtr = reinterpret_cast<VkBaseOutStructure *>(chainStart);
    ptr->pNext                   = localPtr->pNext;
    localPtr->pNext              = reinterpret_cast<VkBaseOutStructure *>(ptr);
}

// Append |ptr| to the end of the chain.
template <typename VulkanStruct1, typename VulkanStruct2>
void AppendToPNextChain(VulkanStruct1 *chainStart, VulkanStruct2 *ptr)
{
    if (!ptr)
    {
        return;
    }

    VkBaseOutStructure *endPtr = reinterpret_cast<VkBaseOutStructure *>(chainStart);
    while (endPtr->pNext)
    {
        endPtr = endPtr->pNext;
    }
    endPtr->pNext = reinterpret_cast<VkBaseOutStructure *>(ptr);
}
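
// A minimal usage sketch (illustrative only; any pair of Vulkan structs with sType/pNext
// members works the same way, and |physicalDevice| is assumed to be a valid handle):
//
//     VkPhysicalDeviceFeatures2 features2 = {};
//     features2.sType                     = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
//
//     VkPhysicalDeviceProtectedMemoryFeatures protectedMemoryFeatures = {};
//     protectedMemoryFeatures.sType =
//         VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES;
//
//     AddToPNextChain(&features2, &protectedMemoryFeatures);
//     vkGetPhysicalDeviceFeatures2(physicalDevice, &features2);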

class QueueSerialIndexAllocator final
{
  public:
    QueueSerialIndexAllocator() : mLargestIndexEverAllocated(kInvalidQueueSerialIndex)
    {
        // Start with every index free.
        mFreeIndexBitSetArray.set();
        ASSERT(mFreeIndexBitSetArray.all());
    }
    SerialIndex allocate()
    {
        std::lock_guard<std::mutex> lock(mMutex);
        if (mFreeIndexBitSetArray.none())
        {
            ERR() << "Ran out of queue serial indices. All " << kMaxQueueSerialIndexCount
                  << " indices are used.";
            return kInvalidQueueSerialIndex;
        }
        SerialIndex index = static_cast<SerialIndex>(mFreeIndexBitSetArray.first());
        ASSERT(index < kMaxQueueSerialIndexCount);
        mFreeIndexBitSetArray.reset(index);
        mLargestIndexEverAllocated = (~mFreeIndexBitSetArray).last();
        return index;
    }

    void release(SerialIndex index)
    {
        std::lock_guard<std::mutex> lock(mMutex);
        ASSERT(index <= mLargestIndexEverAllocated);
        ASSERT(!mFreeIndexBitSetArray.test(index));
        mFreeIndexBitSetArray.set(index);
        // mLargestIndexEverAllocated is an optimization. Even after |index| is released, there
        // may still be resources that hold serials with that index, so do not decrement
        // mLargestIndexEverAllocated here. The only downside is that we may take a slightly
        // less optimal code path in GetBatchCountUpToSerials.
    }

    size_t getLargestIndexEverAllocated() const
    {
        return mLargestIndexEverAllocated.load(std::memory_order_consume);
    }

  private:
    angle::BitSetArray<kMaxQueueSerialIndexCount> mFreeIndexBitSetArray;
    std::atomic<size_t> mLargestIndexEverAllocated;
    std::mutex mMutex;
};
241
242 class [[nodiscard]] ScopedQueueSerialIndex final : angle::NonCopyable
243 {
244 public:
ScopedQueueSerialIndex()245 ScopedQueueSerialIndex() : mIndex(kInvalidQueueSerialIndex), mIndexAllocator(nullptr) {}
~ScopedQueueSerialIndex()246 ~ScopedQueueSerialIndex()
247 {
248 if (mIndex != kInvalidQueueSerialIndex)
249 {
250 ASSERT(mIndexAllocator != nullptr);
251 mIndexAllocator->release(mIndex);
252 }
253 }
254
init(SerialIndex index,QueueSerialIndexAllocator * indexAllocator)255 void init(SerialIndex index, QueueSerialIndexAllocator *indexAllocator)
256 {
257 ASSERT(mIndex == kInvalidQueueSerialIndex);
258 ASSERT(index != kInvalidQueueSerialIndex);
259 ASSERT(indexAllocator != nullptr);
260 mIndex = index;
261 mIndexAllocator = indexAllocator;
262 }
263
get()264 SerialIndex get() const { return mIndex; }
265
266 private:
267 SerialIndex mIndex;
268 QueueSerialIndexAllocator *mIndexAllocator;
269 };
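
// A typical usage sketch (illustrative; |mIndexAllocator| stands in for whatever
// QueueSerialIndexAllocator instance the caller owns):
//
//     ScopedQueueSerialIndex scopedIndex;
//     SerialIndex index = mIndexAllocator.allocate();
//     if (index == kInvalidQueueSerialIndex)
//     {
//         return angle::Result::Stop;
//     }
//     scopedIndex.init(index, &mIndexAllocator);
//     // ... use scopedIndex.get(); the index is released when scopedIndex goes out of scope.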

// Abstracts error handling. Implemented by both ContextVk for GL and DisplayVk for EGL errors.
class Context : angle::NonCopyable
{
  public:
    Context(RendererVk *renderer);
    virtual ~Context();

    virtual void handleError(VkResult result,
                             const char *file,
                             const char *function,
                             unsigned int line) = 0;
    VkDevice getDevice() const;
    RendererVk *getRenderer() const { return mRenderer; }
    const angle::FeaturesVk &getFeatures() const;

    const angle::VulkanPerfCounters &getPerfCounters() const { return mPerfCounters; }
    angle::VulkanPerfCounters &getPerfCounters() { return mPerfCounters; }

  protected:
    RendererVk *const mRenderer;
    angle::VulkanPerfCounters mPerfCounters;
};

class RenderPassDesc;

#if ANGLE_USE_CUSTOM_VULKAN_OUTSIDE_RENDER_PASS_CMD_BUFFERS
using OutsideRenderPassCommandBuffer = priv::SecondaryCommandBuffer;
#else
using OutsideRenderPassCommandBuffer = VulkanSecondaryCommandBuffer;
#endif
#if ANGLE_USE_CUSTOM_VULKAN_RENDER_PASS_CMD_BUFFERS
using RenderPassCommandBuffer = priv::SecondaryCommandBuffer;
#else
using RenderPassCommandBuffer = VulkanSecondaryCommandBuffer;
#endif

struct SecondaryCommandPools
{
    SecondaryCommandPool outsideRenderPassPool;
    SecondaryCommandPool renderPassPool;
};

VkImageAspectFlags GetDepthStencilAspectFlags(const angle::Format &format);
VkImageAspectFlags GetFormatAspectFlags(const angle::Format &format);

template <typename T>
struct ImplTypeHelper;

// clang-format off
#define ANGLE_IMPL_TYPE_HELPER_GL(OBJ) \
template<>                             \
struct ImplTypeHelper<gl::OBJ>         \
{                                      \
    using ImplType = OBJ##Vk;          \
};
// clang-format on

ANGLE_GL_OBJECTS_X(ANGLE_IMPL_TYPE_HELPER_GL)

template <>
struct ImplTypeHelper<gl::MockOverlay>
{
    using ImplType = OverlayVk;
};

template <>
struct ImplTypeHelper<egl::Display>
{
    using ImplType = DisplayVk;
};

template <>
struct ImplTypeHelper<egl::Image>
{
    using ImplType = ImageVk;
};

template <>
struct ImplTypeHelper<egl::ShareGroup>
{
    using ImplType = ShareGroupVk;
};

template <typename T>
using GetImplType = typename ImplTypeHelper<T>::ImplType;

template <typename T>
GetImplType<T> *GetImpl(const T *glObject)
{
    return GetImplAs<GetImplType<T>>(glObject);
}

template <typename T>
GetImplType<T> *SafeGetImpl(const T *glObject)
{
    return SafeGetImplAs<GetImplType<T>>(glObject);
}

template <>
inline OverlayVk *GetImpl(const gl::MockOverlay *glObject)
{
    return nullptr;
}

// Reference to a deleted object. The object is due to be destroyed at some point in the future.
// |mHandleType| determines the type of the object and which destroy function should be called.
class GarbageObject
{
  public:
    GarbageObject();
    GarbageObject(GarbageObject &&other);
    GarbageObject &operator=(GarbageObject &&rhs);

    bool valid() const { return mHandle != VK_NULL_HANDLE; }
    void destroy(RendererVk *renderer);

    template <typename DerivedT, typename HandleT>
    static GarbageObject Get(WrappedObject<DerivedT, HandleT> *object)
    {
        // Using a C-style cast here to avoid conditional compilation for MSVC 32-bit, which
        // fails to compile with reinterpret_cast and requires static_cast instead.
        return GarbageObject(HandleTypeHelper<DerivedT>::kHandleType,
                             (GarbageHandle)(object->release()));
    }

  private:
    VK_DEFINE_NON_DISPATCHABLE_HANDLE(GarbageHandle)
    GarbageObject(HandleType handleType, GarbageHandle handle);

    HandleType mHandleType;
    GarbageHandle mHandle;
};

template <typename T>
GarbageObject GetGarbage(T *obj)
{
    return GarbageObject::Get(obj);
}

// A list of garbage objects. Has no object lifetime information.
using GarbageList = std::vector<GarbageObject>;

// A list of garbage objects and the associated serial after which the objects can be destroyed.
class GarbageAndQueueSerial final : angle::NonCopyable
{
  public:
    GarbageAndQueueSerial() {}

    GarbageAndQueueSerial(GarbageList &&object, QueueSerial serial)
        : mObject(std::move(object)), mQueueSerial(serial)
    {}

    GarbageAndQueueSerial(GarbageAndQueueSerial &&other)
        : mObject(std::move(other.mObject)), mQueueSerial(std::move(other.mQueueSerial))
    {}
    GarbageAndQueueSerial &operator=(GarbageAndQueueSerial &&other)
    {
        mObject      = std::move(other.mObject);
        mQueueSerial = std::move(other.mQueueSerial);
        return *this;
    }

    QueueSerial getQueueSerial() const { return mQueueSerial; }
    void updateQueueSerial(const QueueSerial &newQueueSerial) { mQueueSerial = newQueueSerial; }

    const GarbageList &get() const { return mObject; }
    GarbageList &get() { return mObject; }

  private:
    GarbageList mObject;
    QueueSerial mQueueSerial;
};

// Houses multiple lists of garbage objects. Each sub-list has a different lifetime. They should
// be sorted such that later-living garbage is ordered later in the list.
using GarbageQueue = std::queue<GarbageAndQueueSerial>;

class MemoryProperties final : angle::NonCopyable
{
  public:
    MemoryProperties();

    void init(VkPhysicalDevice physicalDevice);
    bool hasLazilyAllocatedMemory() const;
    angle::Result findCompatibleMemoryIndex(Context *context,
                                            const VkMemoryRequirements &memoryRequirements,
                                            VkMemoryPropertyFlags requestedMemoryPropertyFlags,
                                            bool isExternalMemory,
                                            VkMemoryPropertyFlags *memoryPropertyFlagsOut,
                                            uint32_t *indexOut) const;
    void destroy();

    uint32_t getHeapIndexForMemoryType(uint32_t memoryType) const
    {
        if (memoryType == kInvalidMemoryTypeIndex)
        {
            return kInvalidMemoryHeapIndex;
        }

        ASSERT(memoryType < getMemoryTypeCount());
        return mMemoryProperties.memoryTypes[memoryType].heapIndex;
    }

    VkDeviceSize getHeapSizeForMemoryType(uint32_t memoryType) const
    {
        uint32_t heapIndex = mMemoryProperties.memoryTypes[memoryType].heapIndex;
        return mMemoryProperties.memoryHeaps[heapIndex].size;
    }

    const VkMemoryType &getMemoryType(uint32_t i) const { return mMemoryProperties.memoryTypes[i]; }

    uint32_t getMemoryHeapCount() const { return mMemoryProperties.memoryHeapCount; }
    uint32_t getMemoryTypeCount() const { return mMemoryProperties.memoryTypeCount; }

  private:
    VkPhysicalDeviceMemoryProperties mMemoryProperties;
};

// Similar to StagingImage, for Buffers.
class StagingBuffer final : angle::NonCopyable
{
  public:
    StagingBuffer();
    void release(ContextVk *contextVk);
    void collectGarbage(RendererVk *renderer, const QueueSerial &queueSerial);
    void destroy(RendererVk *renderer);

    angle::Result init(Context *context, VkDeviceSize size, StagingUsage usage);

    Buffer &getBuffer() { return mBuffer; }
    const Buffer &getBuffer() const { return mBuffer; }
    size_t getSize() const { return mSize; }

  private:
    Buffer mBuffer;
    Allocation mAllocation;
    size_t mSize;
};

angle::Result InitMappableAllocation(Context *context,
                                     const Allocator &allocator,
                                     Allocation *allocation,
                                     VkDeviceSize size,
                                     int value,
                                     VkMemoryPropertyFlags memoryPropertyFlags);

angle::Result AllocateBufferMemory(Context *context,
                                   vk::MemoryAllocationType memoryAllocationType,
                                   VkMemoryPropertyFlags requestedMemoryPropertyFlags,
                                   VkMemoryPropertyFlags *memoryPropertyFlagsOut,
                                   const void *extraAllocationInfo,
                                   Buffer *buffer,
                                   uint32_t *memoryTypeIndexOut,
                                   DeviceMemory *deviceMemoryOut,
                                   VkDeviceSize *sizeOut);

angle::Result AllocateImageMemory(Context *context,
                                  vk::MemoryAllocationType memoryAllocationType,
                                  VkMemoryPropertyFlags memoryPropertyFlags,
                                  VkMemoryPropertyFlags *memoryPropertyFlagsOut,
                                  const void *extraAllocationInfo,
                                  Image *image,
                                  uint32_t *memoryTypeIndexOut,
                                  DeviceMemory *deviceMemoryOut,
                                  VkDeviceSize *sizeOut);

angle::Result AllocateImageMemoryWithRequirements(
    Context *context,
    vk::MemoryAllocationType memoryAllocationType,
    VkMemoryPropertyFlags memoryPropertyFlags,
    const VkMemoryRequirements &memoryRequirements,
    const void *extraAllocationInfo,
    const VkBindImagePlaneMemoryInfoKHR *extraBindInfo,
    Image *image,
    uint32_t *memoryTypeIndexOut,
    DeviceMemory *deviceMemoryOut);

angle::Result AllocateBufferMemoryWithRequirements(Context *context,
                                                   MemoryAllocationType memoryAllocationType,
                                                   VkMemoryPropertyFlags memoryPropertyFlags,
                                                   const VkMemoryRequirements &memoryRequirements,
                                                   const void *extraAllocationInfo,
                                                   Buffer *buffer,
                                                   VkMemoryPropertyFlags *memoryPropertyFlagsOut,
                                                   uint32_t *memoryTypeIndexOut,
                                                   DeviceMemory *deviceMemoryOut);

angle::Result InitShaderModule(Context *context,
                               ShaderModule *shaderModule,
                               const uint32_t *shaderCode,
                               size_t shaderCodeSize);

gl::TextureType Get2DTextureType(uint32_t layerCount, GLint samples);

enum class RecordingMode
{
    Start,
    Append,
};

// Helper class to handle RAII patterns for initialization. Requires that T have a destroy method
// that takes a VkDevice and returns void.
template <typename T>
class [[nodiscard]] DeviceScoped final : angle::NonCopyable
{
  public:
    DeviceScoped(VkDevice device) : mDevice(device) {}
    ~DeviceScoped() { mVar.destroy(mDevice); }

    const T &get() const { return mVar; }
    T &get() { return mVar; }

    T &&release() { return std::move(mVar); }

  private:
    VkDevice mDevice;
    T mVar;
};
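
// A usage sketch (assuming, as in vk_wrapper.h, that Semaphore::init returns a VkResult):
//
//     DeviceScoped<Semaphore> newSemaphore(device);
//     ANGLE_VK_TRY(context, newSemaphore.get().init(device));
//     mSemaphore = newSemaphore.release();  // on success, take ownership
//
// If init() fails and the function returns early, the scoped semaphore is destroyed
// automatically.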

template <typename T>
class [[nodiscard]] AllocatorScoped final : angle::NonCopyable
{
  public:
    AllocatorScoped(const Allocator &allocator) : mAllocator(allocator) {}
    ~AllocatorScoped() { mVar.destroy(mAllocator); }

    const T &get() const { return mVar; }
    T &get() { return mVar; }

    T &&release() { return std::move(mVar); }

  private:
    const Allocator &mAllocator;
    T mVar;
};

// Similar to DeviceScoped, but releases objects instead of destroying them. Requires that T have
// a release method that takes a ContextVk * and returns void.
template <typename T>
class [[nodiscard]] ContextScoped final : angle::NonCopyable
{
  public:
    ContextScoped(ContextVk *contextVk) : mContextVk(contextVk) {}
    ~ContextScoped() { mVar.release(mContextVk); }

    const T &get() const { return mVar; }
    T &get() { return mVar; }

    T &&release() { return std::move(mVar); }

  private:
    ContextVk *mContextVk;
    T mVar;
};

template <typename T>
class [[nodiscard]] RendererScoped final : angle::NonCopyable
{
  public:
    RendererScoped(RendererVk *renderer) : mRenderer(renderer) {}
    ~RendererScoped() { mVar.release(mRenderer); }

    const T &get() const { return mVar; }
    T &get() { return mVar; }

    T &&release() { return std::move(mVar); }

  private:
    RendererVk *mRenderer;
    T mVar;
};

// This is a very simple RefCount class that has no autoreleasing. Used in the descriptor set and
// pipeline layout caches.
template <typename T>
class RefCounted : angle::NonCopyable
{
  public:
    RefCounted() : mRefCount(0) {}
    explicit RefCounted(T &&newObject) : mRefCount(0), mObject(std::move(newObject)) {}
    ~RefCounted() { ASSERT(mRefCount == 0 && !mObject.valid()); }

    RefCounted(RefCounted &&copy) : mRefCount(copy.mRefCount), mObject(std::move(copy.mObject))
    {
        ASSERT(this != &copy);
        copy.mRefCount = 0;
    }

    RefCounted &operator=(RefCounted &&rhs)
    {
        std::swap(mRefCount, rhs.mRefCount);
        mObject = std::move(rhs.mObject);
        return *this;
    }

    void addRef()
    {
        ASSERT(mRefCount != std::numeric_limits<uint32_t>::max());
        mRefCount++;
    }

    void releaseRef()
    {
        ASSERT(isReferenced());
        mRefCount--;
    }

    bool isReferenced() const { return mRefCount != 0; }

    T &get() { return mObject; }
    const T &get() const { return mObject; }

    // A debug function to validate that the reference count is as expected; used in assertions.
    bool isRefCountAsExpected(uint32_t expectedRefCount) { return mRefCount == expectedRefCount; }

  private:
    uint32_t mRefCount;
    T mObject;
};

template <typename T>
class BindingPointer final : angle::NonCopyable
{
  public:
    BindingPointer() = default;
    ~BindingPointer() { reset(); }

    BindingPointer(BindingPointer &&other) : mRefCounted(other.mRefCounted)
    {
        other.mRefCounted = nullptr;
    }

    void set(RefCounted<T> *refCounted)
    {
        if (mRefCounted)
        {
            mRefCounted->releaseRef();
        }

        mRefCounted = refCounted;

        if (mRefCounted)
        {
            mRefCounted->addRef();
        }
    }

    void reset() { set(nullptr); }

    T &get() { return mRefCounted->get(); }
    const T &get() const { return mRefCounted->get(); }

    bool valid() const { return mRefCounted != nullptr; }

    RefCounted<T> *getRefCounted() { return mRefCounted; }

  private:
    RefCounted<T> *mRefCounted = nullptr;
};
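
// A sketch of how RefCounted and BindingPointer combine (|lookupInCache| and |use| are
// hypothetical; the RefCounted entry is owned by a cache, the binding by a user):
//
//     RefCounted<PipelineLayout> *cachedLayout = lookupInCache(desc);
//     BindingPointer<PipelineLayout> binding;
//     binding.set(cachedLayout);  // addRef
//     use(binding.get());
//     binding.reset();  // releaseRef; the cache decides when the object is destroyed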

// Helper class to share ref-counted Vulkan objects. Requires that T have a destroy method
// that takes a VkDevice and returns void.
template <typename T>
class Shared final : angle::NonCopyable
{
  public:
    Shared() : mRefCounted(nullptr) {}
    ~Shared() { ASSERT(mRefCounted == nullptr); }

    Shared(Shared &&other) { *this = std::move(other); }
    Shared &operator=(Shared &&other)
    {
        ASSERT(this != &other);
        mRefCounted       = other.mRefCounted;
        other.mRefCounted = nullptr;
        return *this;
    }

    void set(VkDevice device, RefCounted<T> *refCounted)
    {
        if (mRefCounted)
        {
            mRefCounted->releaseRef();
            if (!mRefCounted->isReferenced())
            {
                mRefCounted->get().destroy(device);
                SafeDelete(mRefCounted);
            }
        }

        mRefCounted = refCounted;

        if (mRefCounted)
        {
            mRefCounted->addRef();
        }
    }

    void setUnreferenced(RefCounted<T> *refCounted)
    {
        ASSERT(!mRefCounted);
        ASSERT(refCounted);

        mRefCounted = refCounted;
        mRefCounted->addRef();
    }

    void assign(VkDevice device, T &&newObject)
    {
        set(device, new RefCounted<T>(std::move(newObject)));
    }

    void copy(VkDevice device, const Shared<T> &other) { set(device, other.mRefCounted); }

    void copyUnreferenced(const Shared<T> &other) { setUnreferenced(other.mRefCounted); }

    void reset(VkDevice device) { set(device, nullptr); }

    template <typename RecyclerT>
    void resetAndRecycle(RecyclerT *recycler)
    {
        if (mRefCounted)
        {
            mRefCounted->releaseRef();
            if (!mRefCounted->isReferenced())
            {
                ASSERT(mRefCounted->get().valid());
                recycler->recycle(std::move(mRefCounted->get()));
                SafeDelete(mRefCounted);
            }

            mRefCounted = nullptr;
        }
    }

    template <typename OnRelease>
    void resetAndRelease(OnRelease *onRelease)
    {
        if (mRefCounted)
        {
            mRefCounted->releaseRef();
            if (!mRefCounted->isReferenced())
            {
                ASSERT(mRefCounted->get().valid());
                (*onRelease)(std::move(mRefCounted->get()));
                SafeDelete(mRefCounted);
            }

            mRefCounted = nullptr;
        }
    }

    bool isReferenced() const
    {
        // If the reference count is zero, the object should have been deleted; i.e., if the
        // object is not nullptr, it should have a reference.
        ASSERT(!mRefCounted || mRefCounted->isReferenced());
        return mRefCounted != nullptr;
    }

    T &get()
    {
        ASSERT(mRefCounted && mRefCounted->isReferenced());
        return mRefCounted->get();
    }
    const T &get() const
    {
        ASSERT(mRefCounted && mRefCounted->isReferenced());
        return mRefCounted->get();
    }

  private:
    RefCounted<T> *mRefCounted;
};

template <typename T>
class Recycler final : angle::NonCopyable
{
  public:
    Recycler() = default;

    void recycle(T &&garbageObject)
    {
        // Recycling invalid objects is pointless and potentially a bug.
        ASSERT(garbageObject.valid());
        mObjectFreeList.emplace_back(std::move(garbageObject));
    }

    void fetch(T *outObject)
    {
        ASSERT(!empty());
        *outObject = std::move(mObjectFreeList.back());
        mObjectFreeList.pop_back();
    }

    void destroy(VkDevice device)
    {
        for (T &object : mObjectFreeList)
        {
            object.destroy(device);
        }
        mObjectFreeList.clear();
    }

    bool empty() const { return mObjectFreeList.empty(); }

  private:
    std::vector<T> mObjectFreeList;
};
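
// Usage sketch: instead of destroying a still-valid object, return it to the free list and
// reuse it later (|mSemaphoreRecycler| is a hypothetical member of type Recycler<Semaphore>):
//
//     mSemaphoreRecycler.recycle(std::move(semaphore));
//     ...
//     Semaphore reused;
//     if (!mSemaphoreRecycler.empty())
//     {
//         mSemaphoreRecycler.fetch(&reused);
//     }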

ANGLE_ENABLE_STRUCT_PADDING_WARNINGS
struct SpecializationConstants final
{
    VkBool32 surfaceRotation;
    uint32_t dither;
};
ANGLE_DISABLE_STRUCT_PADDING_WARNINGS

template <typename T>
using SpecializationConstantMap = angle::PackedEnumMap<sh::vk::SpecializationConstantId, T>;

using ShaderModulePointer = BindingPointer<ShaderModule>;
using ShaderModuleMap     = gl::ShaderMap<ShaderModulePointer>;

void MakeDebugUtilsLabel(GLenum source, const char *marker, VkDebugUtilsLabelEXT *label);

constexpr size_t kUnpackedDepthIndex   = gl::IMPLEMENTATION_MAX_DRAW_BUFFERS;
constexpr size_t kUnpackedStencilIndex = gl::IMPLEMENTATION_MAX_DRAW_BUFFERS + 1;
constexpr uint32_t kUnpackedColorBuffersMask =
    angle::BitMask<uint32_t>(gl::IMPLEMENTATION_MAX_DRAW_BUFFERS);

class ClearValuesArray final
{
  public:
    ClearValuesArray();
    ~ClearValuesArray();

    ClearValuesArray(const ClearValuesArray &other);
    ClearValuesArray &operator=(const ClearValuesArray &rhs);

    void store(uint32_t index, VkImageAspectFlags aspectFlags, const VkClearValue &clearValue);
    void storeNoDepthStencil(uint32_t index, const VkClearValue &clearValue);

    void reset(size_t index)
    {
        mValues[index] = {};
        mEnabled.reset(index);
    }

    bool test(size_t index) const { return mEnabled.test(index); }
    bool testDepth() const { return mEnabled.test(kUnpackedDepthIndex); }
    bool testStencil() const { return mEnabled.test(kUnpackedStencilIndex); }
    gl::DrawBufferMask getColorMask() const;

    const VkClearValue &operator[](size_t index) const { return mValues[index]; }

    float getDepthValue() const { return mValues[kUnpackedDepthIndex].depthStencil.depth; }
    uint32_t getStencilValue() const { return mValues[kUnpackedStencilIndex].depthStencil.stencil; }

    const VkClearValue *data() const { return mValues.data(); }
    bool empty() const { return mEnabled.none(); }
    bool any() const { return mEnabled.any(); }

  private:
    gl::AttachmentArray<VkClearValue> mValues;
    gl::AttachmentsMask mEnabled;
};

// Defines Serials for Vulkan objects.
#define ANGLE_VK_SERIAL_OP(X) \
    X(Buffer)                 \
    X(Image)                  \
    X(ImageOrBufferView)      \
    X(Sampler)

#define ANGLE_DEFINE_VK_SERIAL_TYPE(Type)                                     \
    class Type##Serial                                                        \
    {                                                                         \
      public:                                                                 \
        constexpr Type##Serial() : mSerial(kInvalid) {}                       \
        constexpr explicit Type##Serial(uint32_t serial) : mSerial(serial) {} \
                                                                              \
        constexpr bool operator==(const Type##Serial &other) const           \
        {                                                                     \
            ASSERT(mSerial != kInvalid || other.mSerial != kInvalid);         \
            return mSerial == other.mSerial;                                  \
        }                                                                     \
        constexpr bool operator!=(const Type##Serial &other) const           \
        {                                                                     \
            ASSERT(mSerial != kInvalid || other.mSerial != kInvalid);         \
            return mSerial != other.mSerial;                                  \
        }                                                                     \
        constexpr uint32_t getValue() const                                   \
        {                                                                     \
            return mSerial;                                                   \
        }                                                                     \
        constexpr bool valid() const                                          \
        {                                                                     \
            return mSerial != kInvalid;                                       \
        }                                                                     \
                                                                              \
      private:                                                                \
        uint32_t mSerial;                                                     \
        static constexpr uint32_t kInvalid = 0;                               \
    };                                                                        \
    static constexpr Type##Serial kInvalid##Type##Serial = Type##Serial();

ANGLE_VK_SERIAL_OP(ANGLE_DEFINE_VK_SERIAL_TYPE)

#define ANGLE_DECLARE_GEN_VK_SERIAL(Type) Type##Serial generate##Type##Serial();

class ResourceSerialFactory final : angle::NonCopyable
{
  public:
    ResourceSerialFactory();
    ~ResourceSerialFactory();

    ANGLE_VK_SERIAL_OP(ANGLE_DECLARE_GEN_VK_SERIAL)

  private:
    uint32_t issueSerial();

    // Kept atomic so it can be accessed from multiple Context threads at once.
    std::atomic<uint32_t> mCurrentUniqueSerial;
};

#if defined(ANGLE_ENABLE_PERF_COUNTER_OUTPUT)
constexpr bool kOutputCumulativePerfCounters = ANGLE_ENABLE_PERF_COUNTER_OUTPUT;
#else
constexpr bool kOutputCumulativePerfCounters = false;
#endif

// Performance and resource counters.
struct RenderPassPerfCounters
{
    // load/storeOps. Includes ops for the resolve attachment. Maximum value = 2.
    uint8_t colorLoadOpClears;
    uint8_t colorLoadOpLoads;
    uint8_t colorLoadOpNones;
    uint8_t colorStoreOpStores;
    uint8_t colorStoreOpNones;
    uint8_t depthLoadOpClears;
    uint8_t depthLoadOpLoads;
    uint8_t depthLoadOpNones;
    uint8_t depthStoreOpStores;
    uint8_t depthStoreOpNones;
    uint8_t stencilLoadOpClears;
    uint8_t stencilLoadOpLoads;
    uint8_t stencilLoadOpNones;
    uint8_t stencilStoreOpStores;
    uint8_t stencilStoreOpNones;
    // Number of unresolve and resolve operations. Maximum value for color =
    // gl::IMPLEMENTATION_MAX_DRAW_BUFFERS and for depth/stencil = 1 each.
    uint8_t colorAttachmentUnresolves;
    uint8_t colorAttachmentResolves;
    uint8_t depthAttachmentUnresolves;
    uint8_t depthAttachmentResolves;
    uint8_t stencilAttachmentUnresolves;
    uint8_t stencilAttachmentResolves;
    // Whether the depth/stencil attachment is using a read-only layout.
    uint8_t readOnlyDepthStencil;
};

// A Vulkan image level index.
using LevelIndex = gl::LevelIndexWrapper<uint32_t>;

// Ensure the viewport is within Vulkan requirements.
void ClampViewport(VkViewport *viewport);

constexpr bool IsDynamicDescriptor(VkDescriptorType descriptorType)
{
    switch (descriptorType)
    {
        case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
        case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
            return true;
        default:
            return false;
    }
}

void ApplyPipelineCreationFeedback(Context *context, const VkPipelineCreationFeedback &feedback);

angle::Result SetDebugUtilsObjectName(ContextVk *contextVk,
                                      VkObjectType objectType,
                                      uint64_t handle,
                                      const std::string &label);

}  // namespace vk

#if !defined(ANGLE_SHARED_LIBVULKAN)
// Lazily load entry points for each extension as necessary.
void InitDebugUtilsEXTFunctions(VkInstance instance);
void InitGetPhysicalDeviceProperties2KHRFunctions(VkInstance instance);
void InitTransformFeedbackEXTFunctions(VkDevice device);
void InitSamplerYcbcrKHRFunctions(VkDevice device);
void InitRenderPass2KHRFunctions(VkDevice device);

#    if defined(ANGLE_PLATFORM_FUCHSIA)
// VK_FUCHSIA_imagepipe_surface
void InitImagePipeSurfaceFUCHSIAFunctions(VkInstance instance);
#    endif

#    if defined(ANGLE_PLATFORM_ANDROID)
// VK_ANDROID_external_memory_android_hardware_buffer
void InitExternalMemoryHardwareBufferANDROIDFunctions(VkInstance instance);
#    endif

#    if defined(ANGLE_PLATFORM_GGP)
// VK_GGP_stream_descriptor_surface
void InitGGPStreamDescriptorSurfaceFunctions(VkInstance instance);
#    endif  // defined(ANGLE_PLATFORM_GGP)

// VK_KHR_external_semaphore_fd
void InitExternalSemaphoreFdFunctions(VkInstance instance);

// VK_EXT_host_query_reset
void InitHostQueryResetFunctions(VkDevice device);
1089
1090 // VK_KHR_external_fence_capabilities
1091 void InitExternalFenceCapabilitiesFunctions(VkInstance instance);
1092
1093 // VK_KHR_get_memory_requirements2
1094 void InitGetMemoryRequirements2KHRFunctions(VkDevice device);
1095
1096 // VK_KHR_bind_memory2
1097 void InitBindMemory2KHRFunctions(VkDevice device);
1098
1099 // VK_KHR_external_fence_fd
1100 void InitExternalFenceFdFunctions(VkInstance instance);
1101
1102 // VK_KHR_external_semaphore_capabilities
1103 void InitExternalSemaphoreCapabilitiesFunctions(VkInstance instance);
1104
1105 // VK_KHR_shared_presentable_image
1106 void InitGetSwapchainStatusKHRFunctions(VkDevice device);
1107
1108 // VK_EXT_extended_dynamic_state
1109 void InitExtendedDynamicStateEXTFunctions(VkDevice device);
1110
1111 // VK_EXT_extended_dynamic_state2
1112 void InitExtendedDynamicState2EXTFunctions(VkDevice device);
1113
1114 // VK_KHR_fragment_shading_rate
1115 void InitFragmentShadingRateKHRInstanceFunction(VkInstance instance);
1116 void InitFragmentShadingRateKHRDeviceFunction(VkDevice device);
1117
1118 // VK_GOOGLE_display_timing
1119 void InitGetPastPresentationTimingGoogleFunction(VkDevice device);
1120
1121 #endif // !defined(ANGLE_SHARED_LIBVULKAN)
1122
1123 // Promoted to Vulkan 1.1
1124 void InitGetPhysicalDeviceProperties2KHRFunctionsFromCore();
1125 void InitExternalFenceCapabilitiesFunctionsFromCore();
1126 void InitExternalSemaphoreCapabilitiesFunctionsFromCore();
1127 void InitSamplerYcbcrKHRFunctionsFromCore();
1128 void InitGetMemoryRequirements2KHRFunctionsFromCore();
1129 void InitBindMemory2KHRFunctionsFromCore();

GLenum CalculateGenerateMipmapFilter(ContextVk *contextVk, angle::FormatID formatID);
size_t PackSampleCount(GLint sampleCount);

namespace gl_vk
{
VkRect2D GetRect(const gl::Rectangle &source);
VkFilter GetFilter(const GLenum filter);
VkSamplerMipmapMode GetSamplerMipmapMode(const GLenum filter);
VkSamplerAddressMode GetSamplerAddressMode(const GLenum wrap);
VkPrimitiveTopology GetPrimitiveTopology(gl::PrimitiveMode mode);
VkPolygonMode GetPolygonMode(const gl::PolygonMode polygonMode);
VkCullModeFlagBits GetCullMode(const gl::RasterizerState &rasterState);
VkFrontFace GetFrontFace(GLenum frontFace, bool invertCullFace);
VkSampleCountFlagBits GetSamples(GLint sampleCount, bool limitSampleCountTo2);
VkComponentSwizzle GetSwizzle(const GLenum swizzle);
VkCompareOp GetCompareOp(const GLenum compareFunc);
VkStencilOp GetStencilOp(const GLenum compareOp);
VkLogicOp GetLogicOp(const GLenum logicOp);

constexpr gl::ShaderMap<VkShaderStageFlagBits> kShaderStageMap = {
    {gl::ShaderType::Vertex, VK_SHADER_STAGE_VERTEX_BIT},
    {gl::ShaderType::TessControl, VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT},
    {gl::ShaderType::TessEvaluation, VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT},
    {gl::ShaderType::Fragment, VK_SHADER_STAGE_FRAGMENT_BIT},
    {gl::ShaderType::Geometry, VK_SHADER_STAGE_GEOMETRY_BIT},
    {gl::ShaderType::Compute, VK_SHADER_STAGE_COMPUTE_BIT},
};

void GetOffset(const gl::Offset &glOffset, VkOffset3D *vkOffset);
void GetExtent(const gl::Extents &glExtent, VkExtent3D *vkExtent);
VkImageType GetImageType(gl::TextureType textureType);
VkImageViewType GetImageViewType(gl::TextureType textureType);
VkColorComponentFlags GetColorComponentFlags(bool red, bool green, bool blue, bool alpha);
VkShaderStageFlags GetShaderStageFlags(gl::ShaderBitSet activeShaders);

void GetViewport(const gl::Rectangle &viewport,
                 float nearPlane,
                 float farPlane,
                 bool invertViewport,
                 bool upperLeftOrigin,
                 GLint renderAreaHeight,
                 VkViewport *viewportOut);

void GetExtentsAndLayerCount(gl::TextureType textureType,
                             const gl::Extents &extents,
                             VkExtent3D *extentsOut,
                             uint32_t *layerCountOut);

vk::LevelIndex GetLevelIndex(gl::LevelIndex levelGL, gl::LevelIndex baseLevel);

}  // namespace gl_vk

namespace vk_gl
{
// The Vulkan back-end will not support a sample count of 1, because of a Vulkan specification
// restriction:
//
//   If the image was created with VkImageCreateInfo::samples equal to VK_SAMPLE_COUNT_1_BIT, the
//   instruction must: have MS = 0.
//
// This restriction was tracked in http://anglebug.com/4196 and Khronos-private Vulkan
// specification issue https://gitlab.khronos.org/vulkan/vulkan/issues/1925.
//
// In addition, the Vulkan back-end will not support sample counts of 32 or 64, since there are no
// standard sample locations for those sample counts.
constexpr unsigned int kSupportedSampleCounts = (VK_SAMPLE_COUNT_2_BIT | VK_SAMPLE_COUNT_4_BIT |
                                                 VK_SAMPLE_COUNT_8_BIT | VK_SAMPLE_COUNT_16_BIT);

// Find set bits in sampleCounts and add the corresponding sample count to the set.
void AddSampleCounts(VkSampleCountFlags sampleCounts, gl::SupportedSampleSet *outSet);
// Return the maximum sample count with a bit set in |sampleCounts|.
GLuint GetMaxSampleCount(VkSampleCountFlags sampleCounts);
// Return a supported sample count that's at least as large as the requested one.
GLuint GetSampleCount(VkSampleCountFlags supportedCounts, GLuint requestedCount);
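// For example, GetSampleCount(kSupportedSampleCounts, 3) would return 4, the smallest
// supported count that is at least as large as the request (a sketch of the stated contract).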

gl::LevelIndex GetLevelIndex(vk::LevelIndex levelVk, gl::LevelIndex baseLevel);
}  // namespace vk_gl

enum class RenderPassClosureReason
{
    // Don't specify the reason (it should already be specified elsewhere).
    AlreadySpecifiedElsewhere,

    // Implicit closures due to flush/wait/etc.
    ContextDestruction,
    ContextChange,
    GLFlush,
    GLFinish,
    EGLSwapBuffers,
    EGLWaitClient,
    SurfaceUnMakeCurrent,

    // Closure due to switching rendering to another framebuffer.
    FramebufferBindingChange,
    FramebufferChange,
    NewRenderPass,

    // Incompatible use of a resource in the same render pass.
    BufferUseThenXfbWrite,
    XfbWriteThenVertexIndexBuffer,
    XfbWriteThenIndirectDrawBuffer,
    XfbResumeAfterDrawBasedClear,
    DepthStencilUseInFeedbackLoop,
    DepthStencilWriteAfterFeedbackLoop,
    PipelineBindWhileXfbActive,

    // Use of a resource after the render pass.
    BufferWriteThenMap,
    BufferWriteThenOutOfRPRead,
    BufferUseThenOutOfRPWrite,
    ImageUseThenOutOfRPRead,
    ImageUseThenOutOfRPWrite,
    XfbWriteThenComputeRead,
    XfbWriteThenIndirectDispatchBuffer,
    ImageAttachmentThenComputeRead,
    GraphicsTextureImageAccessThenComputeAccess,
    GetQueryResult,
    BeginNonRenderPassQuery,
    EndNonRenderPassQuery,
    TimestampQuery,
    EndRenderPassQuery,
    GLReadPixels,

    // Synchronization.
    BufferUseThenReleaseToExternal,
    ImageUseThenReleaseToExternal,
    BufferInUseWhenSynchronizedMap,
    GLMemoryBarrierThenStorageResource,
    StorageResourceUseThenGLMemoryBarrier,
    ExternalSemaphoreSignal,
    SyncObjectInit,
    SyncObjectWithFdInit,
    SyncObjectClientWait,
    SyncObjectServerWait,
    SyncObjectGetStatus,

    // Closures that ANGLE could have avoided, but doesn't for simplicity or optimization of more
    // common cases.
    XfbPause,
    FramebufferFetchEmulation,
    ColorBufferInvalidate,
    GenerateMipmapOnCPU,
    CopyTextureOnCPU,
    TextureReformatToRenderable,
    DeviceLocalBufferMap,
    OutOfReservedQueueSerialForOutsideCommands,

    // UtilsVk
    PrepareForBlit,
    PrepareForImageCopy,
    TemporaryForImageClear,
    TemporaryForImageCopy,
    TemporaryForOverlayDraw,

    // LegacyDithering requires updating the render pass.
    LegacyDithering,

    InvalidEnum,
    EnumCount = InvalidEnum,
};

}  // namespace rx

#define ANGLE_VK_TRY(context, command)                                                   \
    do                                                                                   \
    {                                                                                    \
        auto ANGLE_LOCAL_VAR = command;                                                  \
        if (ANGLE_UNLIKELY(ANGLE_LOCAL_VAR != VK_SUCCESS))                               \
        {                                                                                \
            (context)->handleError(ANGLE_LOCAL_VAR, __FILE__, ANGLE_FUNCTION, __LINE__); \
            return angle::Result::Stop;                                                  \
        }                                                                                \
    } while (0)
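
// Typical use (sketch): wrap any Vulkan call that returns a VkResult, routing failures
// through Context::handleError and converting to angle::Result:
//
//     ANGLE_VK_TRY(contextVk, vkEndCommandBuffer(commandBuffer));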

#define ANGLE_VK_CHECK(context, test, error) ANGLE_VK_TRY(context, test ? VK_SUCCESS : error)

#define ANGLE_VK_CHECK_MATH(context, result) \
    ANGLE_VK_CHECK(context, result, VK_ERROR_VALIDATION_FAILED_EXT)

#define ANGLE_VK_CHECK_ALLOC(context, result) \
    ANGLE_VK_CHECK(context, result, VK_ERROR_OUT_OF_HOST_MEMORY)

#define ANGLE_VK_UNREACHABLE(context) \
    UNREACHABLE();                    \
    ANGLE_VK_CHECK(context, false, VK_ERROR_FEATURE_NOT_PRESENT)

// NVIDIA uses special formatting for the driver version:
//   Major: 10 bits
//   Minor: 8 bits
//   Sub-minor: 8 bits
//   Patch: 6 bits
#define ANGLE_VK_VERSION_MAJOR_NVIDIA(version) (((uint32_t)(version) >> 22) & 0x3ff)
#define ANGLE_VK_VERSION_MINOR_NVIDIA(version) (((uint32_t)(version) >> 14) & 0xff)
#define ANGLE_VK_VERSION_SUB_MINOR_NVIDIA(version) (((uint32_t)(version) >> 6) & 0xff)
#define ANGLE_VK_VERSION_PATCH_NVIDIA(version) ((uint32_t)(version) & 0x3f)
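
// For example, a raw NVIDIA driver version of 0x72098000 decodes as major 456, minor 38,
// sub-minor 0, patch 0, i.e. driver 456.38.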

// Similarly for Intel on Windows:
//   Major: 18 bits
//   Minor: 14 bits
#define ANGLE_VK_VERSION_MAJOR_WIN_INTEL(version) (((uint32_t)(version) >> 14) & 0x3ffff)
#define ANGLE_VK_VERSION_MINOR_WIN_INTEL(version) ((uint32_t)(version) & 0x3fff)

#endif  // LIBANGLE_RENDERER_VULKAN_VK_UTILS_H_