//
// Copyright 2016 The ANGLE Project Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// vk_utils:
//    Helper functions for the Vulkan Renderer.
//

#ifndef LIBANGLE_RENDERER_VULKAN_VK_UTILS_H_
#define LIBANGLE_RENDERER_VULKAN_VK_UTILS_H_

#include <atomic>
#include <limits>

#include "GLSLANG/ShaderLang.h"
#include "common/FixedVector.h"
#include "common/Optional.h"
#include "common/PackedEnums.h"
#include "common/debug.h"
#include "libANGLE/Error.h"
#include "libANGLE/Observer.h"
#include "libANGLE/renderer/serial_utils.h"
#include "libANGLE/renderer/vulkan/SecondaryCommandBuffer.h"
#include "libANGLE/renderer/vulkan/vk_wrapper.h"
#include "vulkan/vulkan_fuchsia_ext.h"

#define ANGLE_GL_OBJECTS_X(PROC) \
    PROC(Buffer)                 \
    PROC(Context)                \
    PROC(Framebuffer)            \
    PROC(MemoryObject)           \
    PROC(Query)                  \
    PROC(Overlay)                \
    PROC(Program)                \
    PROC(ProgramPipeline)        \
    PROC(Sampler)                \
    PROC(Semaphore)              \
    PROC(Texture)                \
    PROC(TransformFeedback)      \
    PROC(VertexArray)

#define ANGLE_PRE_DECLARE_OBJECT(OBJ) class OBJ;

namespace egl
{
class Display;
class Image;
}  // namespace egl

namespace gl
{
struct Box;
class DummyOverlay;
struct Extents;
struct RasterizerState;
struct Rectangle;
class State;
struct SwizzleState;
struct VertexAttribute;
class VertexBinding;

ANGLE_GL_OBJECTS_X(ANGLE_PRE_DECLARE_OBJECT)
}  // namespace gl

#define ANGLE_PRE_DECLARE_VK_OBJECT(OBJ) class OBJ##Vk;

namespace rx
{
class CommandGraphResource;
class DisplayVk;
class ImageVk;
class RenderTargetVk;
class RendererVk;
class RenderPassCache;
}  // namespace rx

namespace angle
{
egl::Error ToEGL(Result result, rx::DisplayVk *displayVk, EGLint errorCode);
}  // namespace angle

namespace rx
{
ANGLE_GL_OBJECTS_X(ANGLE_PRE_DECLARE_VK_OBJECT)

const char *VulkanResultString(VkResult result);

constexpr size_t kMaxVulkanLayers = 20;
using VulkanLayerVector           = angle::FixedVector<const char *, kMaxVulkanLayers>;

// Verify that validation layers are available.
bool GetAvailableValidationLayers(const std::vector<VkLayerProperties> &layerProps,
                                  bool mustHaveLayers,
                                  VulkanLayerVector *enabledLayerNames);

enum class TextureDimension
{
    TEX_2D,
    TEX_CUBE,
    TEX_3D,
    TEX_2D_ARRAY,
};

namespace vk
{
struct Format;

// Prepend ptr to the pNext chain at chainStart.
template <typename VulkanStruct1, typename VulkanStruct2>
void AddToPNextChain(VulkanStruct1 *chainStart, VulkanStruct2 *ptr)
{
    ASSERT(ptr->pNext == nullptr);

    VkBaseOutStructure *localPtr = reinterpret_cast<VkBaseOutStructure *>(chainStart);
    ptr->pNext                   = localPtr->pNext;
    localPtr->pNext              = reinterpret_cast<VkBaseOutStructure *>(ptr);
}
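
// Illustrative sketch of how a caller might chain an extension struct onto a feature query; the
// specific structs below are just one valid pNext pairing, not a requirement of this helper:
//
//     VkPhysicalDeviceFeatures2 features = {};
//     features.sType                     = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
//     VkPhysicalDeviceProtectedMemoryFeatures protectedMemoryFeatures = {};
//     protectedMemoryFeatures.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES;
//     vk::AddToPNextChain(&features, &protectedMemoryFeatures);
//     vkGetPhysicalDeviceFeatures2(physicalDevice, &features);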

extern const char *gLoaderLayersPathEnv;
extern const char *gLoaderICDFilenamesEnv;
extern const char *gANGLEPreferredDevice;

enum class ICD
{
    Default,
    Mock,
    SwiftShader,
};

// Abstracts error handling. Implemented by both ContextVk for GL and DisplayVk for EGL errors.
class Context : angle::NonCopyable
{
  public:
    Context(RendererVk *renderer);
    virtual ~Context();

    virtual void handleError(VkResult result,
                             const char *file,
                             const char *function,
                             unsigned int line) = 0;
    VkDevice getDevice() const;
    RendererVk *getRenderer() const { return mRenderer; }

    // This is a special override needed so we can determine if we need to initialize images.
    // It corresponds to the EGL or GL extensions depending on the vk::Context type.
    virtual bool isRobustResourceInitEnabled() const = 0;

  protected:
    RendererVk *const mRenderer;
};
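
// Minimal sketch of what an implementation of this interface might look like (TestContext is a
// hypothetical subclass; ContextVk and DisplayVk provide the real implementations):
//
//     class TestContext : public vk::Context
//     {
//       public:
//         TestContext(RendererVk *renderer) : vk::Context(renderer) {}
//         void handleError(VkResult result,
//                          const char *file,
//                          const char *function,
//                          unsigned int line) override
//         {
//             ERR() << "Vulkan error " << VulkanResultString(result) << " at " << file << ":"
//                   << line << " (" << function << ")";
//         }
//         bool isRobustResourceInitEnabled() const override { return false; }
//     };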

#if ANGLE_USE_CUSTOM_VULKAN_CMD_BUFFERS
using CommandBuffer = priv::SecondaryCommandBuffer;
#else
using CommandBuffer = priv::CommandBuffer;
#endif

using PrimaryCommandBuffer = priv::CommandBuffer;

VkImageAspectFlags GetDepthStencilAspectFlags(const angle::Format &format);
VkImageAspectFlags GetFormatAspectFlags(const angle::Format &format);

template <typename T>
struct ImplTypeHelper;

// clang-format off
#define ANGLE_IMPL_TYPE_HELPER_GL(OBJ) \
template<>                             \
struct ImplTypeHelper<gl::OBJ>         \
{                                      \
    using ImplType = OBJ##Vk;          \
};
// clang-format on

ANGLE_GL_OBJECTS_X(ANGLE_IMPL_TYPE_HELPER_GL)

template <>
struct ImplTypeHelper<gl::DummyOverlay>
{
    using ImplType = OverlayVk;
};

template <>
struct ImplTypeHelper<egl::Display>
{
    using ImplType = DisplayVk;
};

template <>
struct ImplTypeHelper<egl::Image>
{
    using ImplType = ImageVk;
};

template <typename T>
using GetImplType = typename ImplTypeHelper<T>::ImplType;

template <typename T>
GetImplType<T> *GetImpl(const T *glObject)
{
    return GetImplAs<GetImplType<T>>(glObject);
}

template <>
inline OverlayVk *GetImpl(const gl::DummyOverlay *glObject)
{
    return nullptr;
}

template <typename ObjT>
class ObjectAndSerial final : angle::NonCopyable
{
  public:
    ObjectAndSerial() {}

    ObjectAndSerial(ObjT &&object, Serial serial) : mObject(std::move(object)), mSerial(serial) {}

    ObjectAndSerial(ObjectAndSerial &&other)
        : mObject(std::move(other.mObject)), mSerial(std::move(other.mSerial))
    {}
    ObjectAndSerial &operator=(ObjectAndSerial &&other)
    {
        mObject = std::move(other.mObject);
        mSerial = std::move(other.mSerial);
        return *this;
    }

    Serial getSerial() const { return mSerial; }
    void updateSerial(Serial newSerial) { mSerial = newSerial; }

    const ObjT &get() const { return mObject; }
    ObjT &get() { return mObject; }

    bool valid() const { return mObject.valid(); }

    void destroy(VkDevice device)
    {
        mObject.destroy(device);
        mSerial = Serial();
    }

  private:
    ObjT mObject;
    Serial mSerial;
};

// Reference to a deleted object. The object is due to be destroyed at some point in the future.
// |mHandleType| determines the type of the object and which destroy function should be called.
class GarbageObject
{
  public:
    GarbageObject();
    GarbageObject(GarbageObject &&other);
    GarbageObject &operator=(GarbageObject &&rhs);

    bool valid() const { return mHandle != VK_NULL_HANDLE; }
    void destroy(RendererVk *renderer);

    template <typename DerivedT, typename HandleT>
    static GarbageObject Get(WrappedObject<DerivedT, HandleT> *object)
    {
        // Using a C-style cast here to avoid conditionally compiling for 32-bit MSVC, which
        // fails to compile with reinterpret_cast and requires static_cast instead.
        return GarbageObject(HandleTypeHelper<DerivedT>::kHandleType,
                             (GarbageHandle)(object->release()));
    }

  private:
    VK_DEFINE_NON_DISPATCHABLE_HANDLE(GarbageHandle)
    GarbageObject(HandleType handleType, GarbageHandle handle);

    HandleType mHandleType;
    GarbageHandle mHandle;
};

template <typename T>
GarbageObject GetGarbage(T *obj)
{
    return GarbageObject::Get(obj);
}

// A list of garbage objects. Has no object lifetime information.
using GarbageList = std::vector<GarbageObject>;

// A list of garbage objects and the associated serial after which the objects can be destroyed.
using GarbageAndSerial = ObjectAndSerial<GarbageList>;

// Houses multiple lists of garbage objects. Each sub-list has a different lifetime. They should be
// sorted such that later-living garbage is ordered later in the list.
using GarbageQueue = std::vector<GarbageAndSerial>;
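
// Illustrative sketch of the intended deferred-destruction flow (buffer, deviceMemory, renderer,
// lastSubmittedSerial and completedSerial are hypothetical locals):
//
//     GarbageList garbage;
//     garbage.emplace_back(GetGarbage(&buffer));        // release the handles now...
//     garbage.emplace_back(GetGarbage(&deviceMemory));
//     GarbageQueue queue;
//     queue.emplace_back(std::move(garbage), lastSubmittedSerial);
//
//     // ...and destroy them only once the GPU has finished the associated serial.
//     if (completedSerial >= queue.front().getSerial())
//     {
//         for (GarbageObject &object : queue.front().get())
//         {
//             object.destroy(renderer);
//         }
//     }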

class MemoryProperties final : angle::NonCopyable
{
  public:
    MemoryProperties();

    void init(VkPhysicalDevice physicalDevice);
    angle::Result findCompatibleMemoryIndex(Context *context,
                                            const VkMemoryRequirements &memoryRequirements,
                                            VkMemoryPropertyFlags requestedMemoryPropertyFlags,
                                            VkMemoryPropertyFlags *memoryPropertyFlagsOut,
                                            uint32_t *indexOut) const;
    void destroy();

  private:
    VkPhysicalDeviceMemoryProperties mMemoryProperties;
};
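
// Illustrative sketch of picking a host-visible memory type for a buffer (memoryProperties,
// device, buffer and context are hypothetical locals):
//
//     constexpr VkMemoryPropertyFlags kHostVisibleFlags =
//         VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
//     VkMemoryRequirements requirements;
//     vkGetBufferMemoryRequirements(device, buffer.getHandle(), &requirements);
//     VkMemoryPropertyFlags actualFlags = 0;
//     uint32_t memoryTypeIndex          = 0;
//     ANGLE_TRY(memoryProperties.findCompatibleMemoryIndex(
//         context, requirements, kHostVisibleFlags, &actualFlags, &memoryTypeIndex));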

// Similar to StagingImage, for Buffers.
class StagingBuffer final : angle::NonCopyable
{
  public:
    StagingBuffer();
    void release(ContextVk *contextVk);
    void collectGarbage(RendererVk *renderer, Serial serial);
    void destroy(RendererVk *renderer);

    angle::Result init(Context *context, VkDeviceSize size, StagingUsage usage);

    Buffer &getBuffer() { return mBuffer; }
    const Buffer &getBuffer() const { return mBuffer; }
    size_t getSize() const { return mSize; }

  private:
    Buffer mBuffer;
    Allocation mAllocation;
    size_t mSize;
};

angle::Result InitMappableAllocation(VmaAllocator allocator,
                                     Allocation *allocation,
                                     VkDeviceSize size,
                                     int value,
                                     VkMemoryPropertyFlags memoryPropertyFlags);

angle::Result InitMappableDeviceMemory(vk::Context *context,
                                       vk::DeviceMemory *deviceMemory,
                                       VkDeviceSize size,
                                       int value,
                                       VkMemoryPropertyFlags memoryPropertyFlags);

angle::Result AllocateBufferMemory(Context *context,
                                   VkMemoryPropertyFlags requestedMemoryPropertyFlags,
                                   VkMemoryPropertyFlags *memoryPropertyFlagsOut,
                                   const void *extraAllocationInfo,
                                   Buffer *buffer,
                                   DeviceMemory *deviceMemoryOut,
                                   VkDeviceSize *sizeOut);

angle::Result AllocateImageMemory(Context *context,
                                  VkMemoryPropertyFlags memoryPropertyFlags,
                                  const void *extraAllocationInfo,
                                  Image *image,
                                  DeviceMemory *deviceMemoryOut,
                                  VkDeviceSize *sizeOut);

angle::Result AllocateImageMemoryWithRequirements(Context *context,
                                                  VkMemoryPropertyFlags memoryPropertyFlags,
                                                  const VkMemoryRequirements &memoryRequirements,
                                                  const void *extraAllocationInfo,
                                                  Image *image,
                                                  DeviceMemory *deviceMemoryOut);

using ShaderAndSerial = ObjectAndSerial<ShaderModule>;

angle::Result InitShaderAndSerial(Context *context,
                                  ShaderAndSerial *shaderAndSerial,
                                  const uint32_t *shaderCode,
                                  size_t shaderCodeSize);

gl::TextureType Get2DTextureType(uint32_t layerCount, GLint samples);

enum class RecordingMode
{
    Start,
    Append,
};

// Helper class to handle RAII patterns for initialization. Requires that T have a destroy method
// that takes a VkDevice and returns void.
template <typename T>
class DeviceScoped final : angle::NonCopyable
{
  public:
    DeviceScoped(VkDevice device) : mDevice(device) {}
    ~DeviceScoped() { mVar.destroy(mDevice); }

    const T &get() const { return mVar; }
    T &get() { return mVar; }

    T &&release() { return std::move(mVar); }

  private:
    VkDevice mDevice;
    T mVar;
};
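
// Illustrative usage sketch, assuming a wrapper type with an init(VkDevice) that returns
// VkResult.  The temporary is destroyed automatically unless it is released into a longer-lived
// owner; after release() the moved-from object is empty, so the destructor's destroy() is a no-op.
//
//     DeviceScoped<Semaphore> scopedSemaphore(device);
//     ANGLE_VK_TRY(context, scopedSemaphore.get().init(device));
//     mSemaphore = scopedSemaphore.release();  // ownership transferred out of the scope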

// Similar to DeviceScoped, but releases objects instead of destroying them. Requires that T have a
// release method that takes a ContextVk * and returns void.
template <typename T>
class ContextScoped final : angle::NonCopyable
{
  public:
    ContextScoped(ContextVk *contextVk) : mContextVk(contextVk) {}
    ~ContextScoped() { mVar.release(mContextVk); }

    const T &get() const { return mVar; }
    T &get() { return mVar; }

    T &&release() { return std::move(mVar); }

  private:
    ContextVk *mContextVk;
    T mVar;
};

template <typename T>
class RendererScoped final : angle::NonCopyable
{
  public:
    RendererScoped(RendererVk *renderer) : mRenderer(renderer) {}
    ~RendererScoped() { mVar.release(mRenderer); }

    const T &get() const { return mVar; }
    T &get() { return mVar; }

    T &&release() { return std::move(mVar); }

  private:
    RendererVk *mRenderer;
    T mVar;
};

// This is a very simple RefCount class that has no autoreleasing. Used in the descriptor set and
// pipeline layout caches.
template <typename T>
class RefCounted : angle::NonCopyable
{
  public:
    RefCounted() : mRefCount(0) {}
    explicit RefCounted(T &&newObject) : mRefCount(0), mObject(std::move(newObject)) {}
    ~RefCounted() { ASSERT(mRefCount == 0 && !mObject.valid()); }

    RefCounted(RefCounted &&copy) : mRefCount(copy.mRefCount), mObject(std::move(copy.mObject))
    {
        ASSERT(this != &copy);
        copy.mRefCount = 0;
    }

    RefCounted &operator=(RefCounted &&rhs)
    {
        std::swap(mRefCount, rhs.mRefCount);
        mObject = std::move(rhs.mObject);
        return *this;
    }

    void addRef()
    {
        ASSERT(mRefCount != std::numeric_limits<uint32_t>::max());
        mRefCount++;
    }

    void releaseRef()
    {
        ASSERT(isReferenced());
        mRefCount--;
    }

    bool isReferenced() const { return mRefCount != 0; }

    T &get() { return mObject; }
    const T &get() const { return mObject; }

  private:
    uint32_t mRefCount;
    T mObject;
};

template <typename T>
class BindingPointer final : angle::NonCopyable
{
  public:
    BindingPointer() : mRefCounted(nullptr) {}

    ~BindingPointer() { reset(); }

    void set(RefCounted<T> *refCounted)
    {
        if (mRefCounted)
        {
            mRefCounted->releaseRef();
        }

        mRefCounted = refCounted;

        if (mRefCounted)
        {
            mRefCounted->addRef();
        }
    }

    void reset() { set(nullptr); }

    T &get() { return mRefCounted->get(); }
    const T &get() const { return mRefCounted->get(); }

    bool valid() const { return mRefCounted != nullptr; }

  private:
    RefCounted<T> *mRefCounted;
};
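
// Illustrative sketch of pairing RefCounted<T> with BindingPointer<T>; lookupOrCreateEntry() is a
// hypothetical cache accessor, and the cache (not the binding) ultimately destroys the object:
//
//     RefCounted<DescriptorSetLayout> *cacheEntry = lookupOrCreateEntry();  // owned by a cache
//     BindingPointer<DescriptorSetLayout> binding;
//     binding.set(cacheEntry);                        // addRef
//     const DescriptorSetLayout &layout = binding.get();
//     binding.reset();                                // releaseRef; cache decides when to destroy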

// Helper class to share ref-counted Vulkan objects.  Requires that T have a destroy method
// that takes a VkDevice and returns void.
template <typename T>
class Shared final : angle::NonCopyable
{
  public:
    Shared() : mRefCounted(nullptr) {}
    ~Shared() { ASSERT(mRefCounted == nullptr); }

    Shared(Shared &&other) { *this = std::move(other); }
    Shared &operator=(Shared &&other)
    {
        ASSERT(this != &other);
        mRefCounted       = other.mRefCounted;
        other.mRefCounted = nullptr;
        return *this;
    }

    void set(VkDevice device, RefCounted<T> *refCounted)
    {
        if (mRefCounted)
        {
            mRefCounted->releaseRef();
            if (!mRefCounted->isReferenced())
            {
                mRefCounted->get().destroy(device);
                SafeDelete(mRefCounted);
            }
        }

        mRefCounted = refCounted;

        if (mRefCounted)
        {
            mRefCounted->addRef();
        }
    }

    void assign(VkDevice device, T &&newObject)
    {
        set(device, new RefCounted<T>(std::move(newObject)));
    }

    void copy(VkDevice device, const Shared<T> &other) { set(device, other.mRefCounted); }

    void reset(VkDevice device) { set(device, nullptr); }

    template <typename RecyclerT>
    void resetAndRecycle(RecyclerT *recycler)
    {
        if (mRefCounted)
        {
            mRefCounted->releaseRef();
            if (!mRefCounted->isReferenced())
            {
                ASSERT(mRefCounted->get().valid());
                recycler->recycle(std::move(mRefCounted->get()));
                SafeDelete(mRefCounted);
            }

            mRefCounted = nullptr;
        }
    }

    bool isReferenced() const
    {
        // If reference is zero, the object should have been deleted.  I.e. if the object is not
        // nullptr, it should have a reference.
        ASSERT(!mRefCounted || mRefCounted->isReferenced());
        return mRefCounted != nullptr;
    }

    T &get()
    {
        ASSERT(mRefCounted && mRefCounted->isReferenced());
        return mRefCounted->get();
    }
    const T &get() const
    {
        ASSERT(mRefCounted && mRefCounted->isReferenced());
        return mRefCounted->get();
    }

  private:
    RefCounted<T> *mRefCounted;
};
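
// Illustrative sketch, assuming a Fence wrapper with an init(VkDevice, const VkFenceCreateInfo &)
// method (fenceCreateInfo, device and context are hypothetical locals):
//
//     Shared<Fence> sharedFence;
//     Fence fence;
//     ANGLE_VK_TRY(context, fence.init(device, fenceCreateInfo));
//     sharedFence.assign(device, std::move(fence));  // takes ownership, refcount = 1
//     Shared<Fence> anotherRef;
//     anotherRef.copy(device, sharedFence);          // refcount = 2
//     anotherRef.reset(device);                      // refcount = 1
//     sharedFence.reset(device);                     // refcount = 0 -> fence destroyed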

template <typename T>
class Recycler final : angle::NonCopyable
{
  public:
    Recycler() = default;

    void recycle(T &&garbageObject) { mObjectFreeList.emplace_back(std::move(garbageObject)); }

    void fetch(T *outObject)
    {
        ASSERT(!empty());
        *outObject = std::move(mObjectFreeList.back());
        mObjectFreeList.pop_back();
    }

    void destroy(VkDevice device)
    {
        for (T &object : mObjectFreeList)
        {
            object.destroy(device);
        }
    }

    bool empty() const { return mObjectFreeList.empty(); }

  private:
    std::vector<T> mObjectFreeList;
};
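
// Illustrative sketch: reuse a previously recycled object instead of creating a new one
// (a hypothetical free list of semaphores, assuming Semaphore::init(VkDevice)):
//
//     Recycler<Semaphore> recycler;
//     Semaphore semaphore;
//     if (!recycler.empty())
//     {
//         recycler.fetch(&semaphore);
//     }
//     else
//     {
//         ANGLE_VK_TRY(context, semaphore.init(device));
//     }
//     // ... use the semaphore ...
//     recycler.recycle(std::move(semaphore));  // return it to the free list when done
//     recycler.destroy(device);                // at shutdown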

using SpecializationConstantBitSet =
    angle::PackedEnumBitSet<sh::vk::SpecializationConstantId, uint32_t>;
static_assert(sizeof(SpecializationConstantBitSet) == sizeof(uint32_t), "Unexpected size");

template <typename T>
using SpecializationConstantMap = angle::PackedEnumMap<sh::vk::SpecializationConstantId, T>;

void MakeDebugUtilsLabel(GLenum source, const char *marker, VkDebugUtilsLabelEXT *label);
}  // namespace vk

namespace gl_vk
{
VkRect2D GetRect(const gl::Rectangle &source);
VkFilter GetFilter(const GLenum filter);
VkSamplerMipmapMode GetSamplerMipmapMode(const GLenum filter);
VkSamplerAddressMode GetSamplerAddressMode(const GLenum wrap);
VkPrimitiveTopology GetPrimitiveTopology(gl::PrimitiveMode mode);
VkCullModeFlagBits GetCullMode(const gl::RasterizerState &rasterState);
VkFrontFace GetFrontFace(GLenum frontFace, bool invertCullFace);
VkSampleCountFlagBits GetSamples(GLint sampleCount);
VkComponentSwizzle GetSwizzle(const GLenum swizzle);
VkCompareOp GetCompareOp(const GLenum compareFunc);

constexpr gl::ShaderMap<VkShaderStageFlagBits> kShaderStageMap = {
    {gl::ShaderType::Vertex, VK_SHADER_STAGE_VERTEX_BIT},
    {gl::ShaderType::Fragment, VK_SHADER_STAGE_FRAGMENT_BIT},
    {gl::ShaderType::Geometry, VK_SHADER_STAGE_GEOMETRY_BIT},
    {gl::ShaderType::Compute, VK_SHADER_STAGE_COMPUTE_BIT},
};

void GetOffset(const gl::Offset &glOffset, VkOffset3D *vkOffset);
void GetExtent(const gl::Extents &glExtent, VkExtent3D *vkExtent);
VkImageType GetImageType(gl::TextureType textureType);
VkImageViewType GetImageViewType(gl::TextureType textureType);
VkColorComponentFlags GetColorComponentFlags(bool red, bool green, bool blue, bool alpha);
VkShaderStageFlags GetShaderStageFlags(gl::ShaderBitSet activeShaders);

void GetViewport(const gl::Rectangle &viewport,
                 float nearPlane,
                 float farPlane,
                 bool invertViewport,
                 GLint renderAreaHeight,
                 VkViewport *viewportOut);

void GetExtentsAndLayerCount(gl::TextureType textureType,
                             const gl::Extents &extents,
                             VkExtent3D *extentsOut,
                             uint32_t *layerCountOut);
}  // namespace gl_vk

namespace vk_gl
{
// The Vulkan back-end will not support a sample count of 1, because of a Vulkan specification
// restriction:
//
//   If the image was created with VkImageCreateInfo::samples equal to VK_SAMPLE_COUNT_1_BIT, the
//   instruction must: have MS = 0.
//
// This restriction was tracked in http://anglebug.com/4196 and Khronos-private Vulkan
// specification issue https://gitlab.khronos.org/vulkan/vulkan/issues/1925.
//
// In addition, the Vulkan back-end will not support sample counts of 32 or 64, since there are no
// standard sample locations for those sample counts.
constexpr unsigned int kSupportedSampleCounts = (VK_SAMPLE_COUNT_2_BIT | VK_SAMPLE_COUNT_4_BIT |
                                                 VK_SAMPLE_COUNT_8_BIT | VK_SAMPLE_COUNT_16_BIT);

// Find set bits in sampleCounts and add the corresponding sample count to the set.
void AddSampleCounts(VkSampleCountFlags sampleCounts, gl::SupportedSampleSet *outSet);
// Return the maximum sample count with a bit set in |sampleCounts|.
GLuint GetMaxSampleCount(VkSampleCountFlags sampleCounts);
// Return a supported sample count that's at least as large as the requested one.
GLuint GetSampleCount(VkSampleCountFlags supportedCounts, GLuint requestedCount);
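
// For example (illustrative values), a request for 3 samples would be rounded up by
// GetSampleCount when VK_SAMPLE_COUNT_4_BIT is present in |supportedCounts|:
//
//     GLuint sampleCount = GetSampleCount(supportedCounts, 3);  // -> 4 if 4x is supported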
}  // namespace vk_gl

}  // namespace rx

#define ANGLE_VK_TRY(context, command)                                                 \
    do                                                                                 \
    {                                                                                  \
        auto ANGLE_LOCAL_VAR = command;                                                \
        if (ANGLE_UNLIKELY(ANGLE_LOCAL_VAR != VK_SUCCESS))                             \
        {                                                                              \
            context->handleError(ANGLE_LOCAL_VAR, __FILE__, ANGLE_FUNCTION, __LINE__); \
            return angle::Result::Stop;                                                \
        }                                                                              \
    } while (0)

#define ANGLE_VK_CHECK(context, test, error) ANGLE_VK_TRY(context, test ? VK_SUCCESS : error)

#define ANGLE_VK_CHECK_MATH(context, result) \
    ANGLE_VK_CHECK(context, result, VK_ERROR_VALIDATION_FAILED_EXT)

#define ANGLE_VK_CHECK_ALLOC(context, result) \
    ANGLE_VK_CHECK(context, result, VK_ERROR_OUT_OF_HOST_MEMORY)

#define ANGLE_VK_UNREACHABLE(context) \
    UNREACHABLE();                    \
    ANGLE_VK_CHECK(context, false, VK_ERROR_FEATURE_NOT_PRESENT)
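
// Illustrative usage of the error-handling macros in a function that returns angle::Result
// (MyHelper, initFence and mFence are hypothetical names):
//
//     angle::Result MyHelper::initFence(vk::Context *context, VkDevice device)
//     {
//         VkFenceCreateInfo createInfo = {};
//         createInfo.sType             = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
//         ANGLE_VK_TRY(context, vkCreateFence(device, &createInfo, nullptr, &mFence));
//         ANGLE_VK_CHECK(context, mFence != VK_NULL_HANDLE, VK_ERROR_INITIALIZATION_FAILED);
//         return angle::Result::Continue;
//     }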

#endif  // LIBANGLE_RENDERER_VULKAN_VK_UTILS_H_