//
// Copyright 2016 The ANGLE Project Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// vk_utils:
//    Helper functions for the Vulkan Renderer.
//

#ifndef LIBANGLE_RENDERER_VULKAN_VK_UTILS_H_
#define LIBANGLE_RENDERER_VULKAN_VK_UTILS_H_

#include <atomic>
#include <limits>

#include "GLSLANG/ShaderLang.h"
#include "common/FixedVector.h"
#include "common/Optional.h"
#include "common/PackedEnums.h"
#include "common/debug.h"
#include "libANGLE/Error.h"
#include "libANGLE/Observer.h"
#include "libANGLE/renderer/serial_utils.h"
#include "libANGLE/renderer/vulkan/SecondaryCommandBuffer.h"
#include "libANGLE/renderer/vulkan/vk_wrapper.h"
#include "vulkan/vulkan_fuchsia_ext.h"

#define ANGLE_GL_OBJECTS_X(PROC) \
    PROC(Buffer)                 \
    PROC(Context)                \
    PROC(Framebuffer)            \
    PROC(MemoryObject)           \
    PROC(Query)                  \
    PROC(Overlay)                \
    PROC(Program)                \
    PROC(ProgramPipeline)        \
    PROC(Sampler)                \
    PROC(Semaphore)              \
    PROC(Texture)                \
    PROC(TransformFeedback)      \
    PROC(VertexArray)

#define ANGLE_PRE_DECLARE_OBJECT(OBJ) class OBJ;

namespace egl
{
class Display;
class Image;
}  // namespace egl

namespace gl
{
struct Box;
class DummyOverlay;
struct Extents;
struct RasterizerState;
struct Rectangle;
class State;
struct SwizzleState;
struct VertexAttribute;
class VertexBinding;

ANGLE_GL_OBJECTS_X(ANGLE_PRE_DECLARE_OBJECT)
}  // namespace gl

#define ANGLE_PRE_DECLARE_VK_OBJECT(OBJ) class OBJ##Vk;

namespace rx
{
class CommandGraphResource;
class DisplayVk;
class ImageVk;
class RenderTargetVk;
class RendererVk;
class RenderPassCache;
}  // namespace rx

namespace angle
{
egl::Error ToEGL(Result result, rx::DisplayVk *displayVk, EGLint errorCode);
}  // namespace angle

namespace rx
{
ANGLE_GL_OBJECTS_X(ANGLE_PRE_DECLARE_VK_OBJECT)

const char *VulkanResultString(VkResult result);

constexpr size_t kMaxVulkanLayers = 20;
using VulkanLayerVector = angle::FixedVector<const char *, kMaxVulkanLayers>;

// Verify that validation layers are available.
bool GetAvailableValidationLayers(const std::vector<VkLayerProperties> &layerProps,
                                  bool mustHaveLayers,
                                  VulkanLayerVector *enabledLayerNames);

enum class TextureDimension
{
    TEX_2D,
    TEX_CUBE,
    TEX_3D,
    TEX_2D_ARRAY,
};

namespace vk
{
struct Format;

// Prepend |ptr| to the pNext chain at |chainStart|.
template <typename VulkanStruct1, typename VulkanStruct2>
void AddToPNextChain(VulkanStruct1 *chainStart, VulkanStruct2 *ptr)
{
    ASSERT(ptr->pNext == nullptr);

    VkBaseOutStructure *localPtr = reinterpret_cast<VkBaseOutStructure *>(chainStart);
    ptr->pNext                   = localPtr->pNext;
    localPtr->pNext              = reinterpret_cast<VkBaseOutStructure *>(ptr);
}
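
// Example usage (illustrative; |physicalDevice| and the chained extension struct are
// placeholders, not part of this header):
//
//     VkPhysicalDeviceFeatures2 features = {};
//     features.sType                     = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
//
//     VkPhysicalDeviceIndexTypeUint8FeaturesEXT indexTypeUint8Features = {};
//     indexTypeUint8Features.sType =
//         VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT;
//
//     AddToPNextChain(&features, &indexTypeUint8Features);
//     vkGetPhysicalDeviceFeatures2(physicalDevice, &features);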

// Abstracts error handling. Implemented by both ContextVk for GL and DisplayVk for EGL errors.
class Context : angle::NonCopyable
{
  public:
    Context(RendererVk *renderer);
    virtual ~Context();

    virtual void handleError(VkResult result,
                             const char *file,
                             const char *function,
                             unsigned int line) = 0;
    VkDevice getDevice() const;
    RendererVk *getRenderer() const { return mRenderer; }

    // This is a special override needed so we can determine if we need to initialize images.
    // It corresponds to the EGL or GL extensions depending on the vk::Context type.
    virtual bool isRobustResourceInitEnabled() const = 0;

  protected:
    RendererVk *const mRenderer;
};

#if ANGLE_USE_CUSTOM_VULKAN_CMD_BUFFERS
using CommandBuffer = priv::SecondaryCommandBuffer;
#else
using CommandBuffer = priv::CommandBuffer;
#endif

using PrimaryCommandBuffer = priv::CommandBuffer;

VkImageAspectFlags GetDepthStencilAspectFlags(const angle::Format &format);
VkImageAspectFlags GetFormatAspectFlags(const angle::Format &format);

template <typename T>
struct ImplTypeHelper;

// clang-format off
#define ANGLE_IMPL_TYPE_HELPER_GL(OBJ) \
template<>                             \
struct ImplTypeHelper<gl::OBJ>         \
{                                      \
    using ImplType = OBJ##Vk;          \
};
// clang-format on

ANGLE_GL_OBJECTS_X(ANGLE_IMPL_TYPE_HELPER_GL)

template <>
struct ImplTypeHelper<gl::DummyOverlay>
{
    using ImplType = OverlayVk;
};

template <>
struct ImplTypeHelper<egl::Display>
{
    using ImplType = DisplayVk;
};

template <>
struct ImplTypeHelper<egl::Image>
{
    using ImplType = ImageVk;
};

template <typename T>
using GetImplType = typename ImplTypeHelper<T>::ImplType;

template <typename T>
GetImplType<T> *GetImpl(const T *glObject)
{
    return GetImplAs<GetImplType<T>>(glObject);
}

template <>
inline OverlayVk *GetImpl(const gl::DummyOverlay *glObject)
{
    return nullptr;
}
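
// Example usage (illustrative; |texture| is a placeholder front-end object):
//
//     const gl::Texture *texture = ...;
//     TextureVk *textureVk       = vk::GetImpl(texture);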

// Pairs an object with a serial, e.g. the serial of the queue submission in which the object
// was last used, or the serial after which it can safely be destroyed.
template <typename ObjT>
class ObjectAndSerial final : angle::NonCopyable
{
  public:
    ObjectAndSerial() {}

    ObjectAndSerial(ObjT &&object, Serial serial) : mObject(std::move(object)), mSerial(serial) {}

    ObjectAndSerial(ObjectAndSerial &&other)
        : mObject(std::move(other.mObject)), mSerial(std::move(other.mSerial))
    {}
    ObjectAndSerial &operator=(ObjectAndSerial &&other)
    {
        mObject = std::move(other.mObject);
        mSerial = std::move(other.mSerial);
        return *this;
    }

    Serial getSerial() const { return mSerial; }
    void updateSerial(Serial newSerial) { mSerial = newSerial; }

    const ObjT &get() const { return mObject; }
    ObjT &get() { return mObject; }

    bool valid() const { return mObject.valid(); }

    void destroy(VkDevice device)
    {
        mObject.destroy(device);
        mSerial = Serial();
    }

  private:
    ObjT mObject;
    Serial mSerial;
};

// Reference to a deleted object. The object is due to be destroyed at some point in the future.
// |mHandleType| determines the type of the object and which destroy function should be called.
class GarbageObject
{
  public:
    GarbageObject();
    GarbageObject(GarbageObject &&other);
    GarbageObject &operator=(GarbageObject &&rhs);

    bool valid() const { return mHandle != VK_NULL_HANDLE; }
    void destroy(RendererVk *renderer);

    template <typename DerivedT, typename HandleT>
    static GarbageObject Get(WrappedObject<DerivedT, HandleT> *object)
    {
        // Using a C-style cast here to avoid conditional compilation for 32-bit MSVC, which
        // fails to compile with reinterpret_cast and requires static_cast instead.
        return GarbageObject(HandleTypeHelper<DerivedT>::kHandleType,
                             (GarbageHandle)(object->release()));
    }

  private:
    VK_DEFINE_NON_DISPATCHABLE_HANDLE(GarbageHandle)
    GarbageObject(HandleType handleType, GarbageHandle handle);

    HandleType mHandleType;
    GarbageHandle mHandle;
};

template <typename T>
GarbageObject GetGarbage(T *obj)
{
    return GarbageObject::Get(obj);
}
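
// Example usage (illustrative; |mBuffer| is a placeholder member):
//
//     GarbageObject garbage = GetGarbage(&mBuffer);
//     // |mBuffer| has released its handle; |garbage| now owns it and destroys it later via
//     // garbage.destroy(renderer).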

// A list of garbage objects. Has no object lifetime information.
using GarbageList = std::vector<GarbageObject>;

// A list of garbage objects and the associated serial after which the objects can be destroyed.
using GarbageAndSerial = ObjectAndSerial<GarbageList>;

// Houses multiple lists of garbage objects. Each sub-list has a different lifetime. They should
// be sorted such that later-living garbage is ordered later in the list.
using GarbageQueue = std::vector<GarbageAndSerial>;
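
// Example (an illustrative sketch of draining a sorted queue; |renderer| and
// |lastCompletedSerial| are placeholders):
//
//     while (!garbageQueue.empty() && garbageQueue.front().getSerial() <= lastCompletedSerial)
//     {
//         for (GarbageObject &garbage : garbageQueue.front().get())
//         {
//             garbage.destroy(renderer);
//         }
//         garbageQueue.erase(garbageQueue.begin());
//     }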

class MemoryProperties final : angle::NonCopyable
{
  public:
    MemoryProperties();

    void init(VkPhysicalDevice physicalDevice);
    angle::Result findCompatibleMemoryIndex(Context *context,
                                            const VkMemoryRequirements &memoryRequirements,
                                            VkMemoryPropertyFlags requestedMemoryPropertyFlags,
                                            VkMemoryPropertyFlags *memoryPropertyFlagsOut,
                                            uint32_t *indexOut) const;
    void destroy();

    VkDeviceSize getHeapSizeForMemoryType(uint32_t memoryType) const
    {
        uint32_t heapIndex = mMemoryProperties.memoryTypes[memoryType].heapIndex;
        return mMemoryProperties.memoryHeaps[heapIndex].size;
    }

  private:
    VkPhysicalDeviceMemoryProperties mMemoryProperties;
};
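
// Example usage (illustrative; |device|, |buffer|, and |memoryProperties| are placeholders):
//
//     VkMemoryRequirements memoryRequirements;
//     vkGetBufferMemoryRequirements(device, buffer.getHandle(), &memoryRequirements);
//
//     VkMemoryPropertyFlags memoryPropertyFlagsOut = 0;
//     uint32_t memoryTypeIndex                     = 0;
//     ANGLE_TRY(memoryProperties.findCompatibleMemoryIndex(
//         context, memoryRequirements, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT,
//         &memoryPropertyFlagsOut, &memoryTypeIndex));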

// Similar to StagingImage, for Buffers.
class StagingBuffer final : angle::NonCopyable
{
  public:
    StagingBuffer();
    void release(ContextVk *contextVk);
    void collectGarbage(RendererVk *renderer, Serial serial);
    void destroy(RendererVk *renderer);

    angle::Result init(Context *context, VkDeviceSize size, StagingUsage usage);

    Buffer &getBuffer() { return mBuffer; }
    const Buffer &getBuffer() const { return mBuffer; }
    size_t getSize() const { return mSize; }

  private:
    Buffer mBuffer;
    Allocation mAllocation;
    size_t mSize;
};

angle::Result InitMappableAllocation(const vk::Allocator &allocator,
                                     Allocation *allocation,
                                     VkDeviceSize size,
                                     int value,
                                     VkMemoryPropertyFlags memoryPropertyFlags);

angle::Result InitMappableDeviceMemory(vk::Context *context,
                                       vk::DeviceMemory *deviceMemory,
                                       VkDeviceSize size,
                                       int value,
                                       VkMemoryPropertyFlags memoryPropertyFlags);

angle::Result AllocateBufferMemory(Context *context,
                                   VkMemoryPropertyFlags requestedMemoryPropertyFlags,
                                   VkMemoryPropertyFlags *memoryPropertyFlagsOut,
                                   const void *extraAllocationInfo,
                                   Buffer *buffer,
                                   DeviceMemory *deviceMemoryOut,
                                   VkDeviceSize *sizeOut);

angle::Result AllocateImageMemory(Context *context,
                                  VkMemoryPropertyFlags memoryPropertyFlags,
                                  const void *extraAllocationInfo,
                                  Image *image,
                                  DeviceMemory *deviceMemoryOut,
                                  VkDeviceSize *sizeOut);

angle::Result AllocateImageMemoryWithRequirements(Context *context,
                                                  VkMemoryPropertyFlags memoryPropertyFlags,
                                                  const VkMemoryRequirements &memoryRequirements,
                                                  const void *extraAllocationInfo,
                                                  Image *image,
                                                  DeviceMemory *deviceMemoryOut);

using ShaderAndSerial = ObjectAndSerial<ShaderModule>;

angle::Result InitShaderAndSerial(Context *context,
                                  ShaderAndSerial *shaderAndSerial,
                                  const uint32_t *shaderCode,
                                  size_t shaderCodeSize);

gl::TextureType Get2DTextureType(uint32_t layerCount, GLint samples);

enum class RecordingMode
{
    Start,
    Append,
};

// Helper class to handle RAII patterns for initialization. Requires that T have a destroy method
// that takes a VkDevice and returns void.
template <typename T>
class DeviceScoped final : angle::NonCopyable
{
  public:
    DeviceScoped(VkDevice device) : mDevice(device) {}
    ~DeviceScoped() { mVar.destroy(mDevice); }

    const T &get() const { return mVar; }
    T &get() { return mVar; }

    T &&release() { return std::move(mVar); }

  private:
    VkDevice mDevice;
    T mVar;
};
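
// Example usage (illustrative; |mBuffer| and |createInfo| are placeholders):
//
//     DeviceScoped<Buffer> scopedBuffer(device);
//     ANGLE_VK_TRY(context, scopedBuffer.get().init(device, createInfo));
//     // On any early error return above, the scoped buffer is destroyed automatically.
//     mBuffer = scopedBuffer.release();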

// Similar to DeviceScoped, but releases objects instead of destroying them. Requires that T have
// a release method that takes a ContextVk * and returns void.
template <typename T>
class ContextScoped final : angle::NonCopyable
{
  public:
    ContextScoped(ContextVk *contextVk) : mContextVk(contextVk) {}
    ~ContextScoped() { mVar.release(mContextVk); }

    const T &get() const { return mVar; }
    T &get() { return mVar; }

    T &&release() { return std::move(mVar); }

  private:
    ContextVk *mContextVk;
    T mVar;
};

template <typename T>
class RendererScoped final : angle::NonCopyable
{
  public:
    RendererScoped(RendererVk *renderer) : mRenderer(renderer) {}
    ~RendererScoped() { mVar.release(mRenderer); }

    const T &get() const { return mVar; }
    T &get() { return mVar; }

    T &&release() { return std::move(mVar); }

  private:
    RendererVk *mRenderer;
    T mVar;
};

// This is a very simple RefCount class that has no autoreleasing. Used in the descriptor set and
// pipeline layout caches.
template <typename T>
class RefCounted : angle::NonCopyable
{
  public:
    RefCounted() : mRefCount(0) {}
    explicit RefCounted(T &&newObject) : mRefCount(0), mObject(std::move(newObject)) {}
    ~RefCounted() { ASSERT(mRefCount == 0 && !mObject.valid()); }

    RefCounted(RefCounted &&copy) : mRefCount(copy.mRefCount), mObject(std::move(copy.mObject))
    {
        ASSERT(this != &copy);
        copy.mRefCount = 0;
    }

    RefCounted &operator=(RefCounted &&rhs)
    {
        std::swap(mRefCount, rhs.mRefCount);
        mObject = std::move(rhs.mObject);
        return *this;
    }

    void addRef()
    {
        ASSERT(mRefCount != std::numeric_limits<uint32_t>::max());
        mRefCount++;
    }

    void releaseRef()
    {
        ASSERT(isReferenced());
        mRefCount--;
    }

    bool isReferenced() const { return mRefCount != 0; }

    T &get() { return mObject; }
    const T &get() const { return mObject; }

  private:
    uint32_t mRefCount;
    T mObject;
};

// Tracks a reference to a RefCounted object, adding a reference on set() and releasing it on
// reset(). Does not destroy the referenced object; its owner (e.g. a cache) does that.
template <typename T>
class BindingPointer final : angle::NonCopyable
{
  public:
    BindingPointer() : mRefCounted(nullptr) {}

    ~BindingPointer() { reset(); }

    void set(RefCounted<T> *refCounted)
    {
        if (mRefCounted)
        {
            mRefCounted->releaseRef();
        }

        mRefCounted = refCounted;

        if (mRefCounted)
        {
            mRefCounted->addRef();
        }
    }

    void reset() { set(nullptr); }

    T &get() { return mRefCounted->get(); }
    const T &get() const { return mRefCounted->get(); }

    bool valid() const { return mRefCounted != nullptr; }

  private:
    RefCounted<T> *mRefCounted;
};
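
// Example usage (illustrative; |cache| is a placeholder for something like a pipeline layout
// cache that owns the RefCounted entries):
//
//     RefCounted<PipelineLayout> *entry = cache.getEntry(desc);
//     BindingPointer<PipelineLayout> binding;
//     binding.set(entry);   // addRef
//     usePipelineLayout(binding.get());
//     binding.reset();      // releaseRef; the cache decides when to destroy the entry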

// Helper class to share ref-counted Vulkan objects. Requires that T have a destroy method
// that takes a VkDevice and returns void.
template <typename T>
class Shared final : angle::NonCopyable
{
  public:
    Shared() : mRefCounted(nullptr) {}
    ~Shared() { ASSERT(mRefCounted == nullptr); }

    Shared(Shared &&other) { *this = std::move(other); }
    Shared &operator=(Shared &&other)
    {
        ASSERT(this != &other);
        mRefCounted       = other.mRefCounted;
        other.mRefCounted = nullptr;
        return *this;
    }

    void set(VkDevice device, RefCounted<T> *refCounted)
    {
        if (mRefCounted)
        {
            mRefCounted->releaseRef();
            if (!mRefCounted->isReferenced())
            {
                mRefCounted->get().destroy(device);
                SafeDelete(mRefCounted);
            }
        }

        mRefCounted = refCounted;

        if (mRefCounted)
        {
            mRefCounted->addRef();
        }
    }

    void assign(VkDevice device, T &&newObject)
    {
        set(device, new RefCounted<T>(std::move(newObject)));
    }

    void copy(VkDevice device, const Shared<T> &other) { set(device, other.mRefCounted); }

    void reset(VkDevice device) { set(device, nullptr); }

    template <typename RecyclerT>
    void resetAndRecycle(RecyclerT *recycler)
    {
        if (mRefCounted)
        {
            mRefCounted->releaseRef();
            if (!mRefCounted->isReferenced())
            {
                ASSERT(mRefCounted->get().valid());
                recycler->recycle(std::move(mRefCounted->get()));
                SafeDelete(mRefCounted);
            }

            mRefCounted = nullptr;
        }
    }

    bool isReferenced() const
    {
        // If the reference count is zero, the object should have been deleted; i.e. if the
        // pointer is not nullptr, it should hold a reference.
        ASSERT(!mRefCounted || mRefCounted->isReferenced());
        return mRefCounted != nullptr;
    }

    T &get()
    {
        ASSERT(mRefCounted && mRefCounted->isReferenced());
        return mRefCounted->get();
    }
    const T &get() const
    {
        ASSERT(mRefCounted && mRefCounted->isReferenced());
        return mRefCounted->get();
    }

  private:
    RefCounted<T> *mRefCounted;
};
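
// Example usage (illustrative; |newFence| and |fenceRecycler| are placeholders; Recycler is
// defined below):
//
//     Shared<Fence> fence;
//     fence.assign(device, std::move(newFence));       // ref count 1
//
//     Shared<Fence> inFlightFence;
//     inFlightFence.copy(device, fence);               // ref count 2
//
//     fence.reset(device);                             // ref count 1; fence still alive
//     inFlightFence.resetAndRecycle(&fenceRecycler);   // last reference: recycled, not destroyed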

// Keeps a free list of released objects so they can be reused instead of recreated.
template <typename T>
class Recycler final : angle::NonCopyable
{
  public:
    Recycler() = default;

    void recycle(T &&garbageObject) { mObjectFreeList.emplace_back(std::move(garbageObject)); }

    void fetch(T *outObject)
    {
        ASSERT(!empty());
        *outObject = std::move(mObjectFreeList.back());
        mObjectFreeList.pop_back();
    }

    void destroy(VkDevice device)
    {
        for (T &object : mObjectFreeList)
        {
            object.destroy(device);
        }
    }

    bool empty() const { return mObjectFreeList.empty(); }

  private:
    std::vector<T> mObjectFreeList;
};
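
// Example usage (illustrative; |mFenceRecycler| is a placeholder member):
//
//     Fence fence;
//     if (!mFenceRecycler.empty())
//     {
//         mFenceRecycler.fetch(&fence);
//         ANGLE_VK_TRY(context, fence.reset(device));
//     }
//     else
//     {
//         VkFenceCreateInfo createInfo = {};
//         createInfo.sType             = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
//         ANGLE_VK_TRY(context, fence.init(device, createInfo));
//     }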

using SpecializationConstantBitSet =
    angle::PackedEnumBitSet<sh::vk::SpecializationConstantId, uint32_t>;
static_assert(sizeof(SpecializationConstantBitSet) == sizeof(uint32_t), "Unexpected size");

template <typename T>
using SpecializationConstantMap = angle::PackedEnumMap<sh::vk::SpecializationConstantId, T>;

void MakeDebugUtilsLabel(GLenum source, const char *marker, VkDebugUtilsLabelEXT *label);

// Clear values are stored as one value per color attachment, followed by the depth and stencil
// values at these fixed indices.
constexpr size_t kClearValueDepthIndex   = gl::IMPLEMENTATION_MAX_DRAW_BUFFERS;
constexpr size_t kClearValueStencilIndex = gl::IMPLEMENTATION_MAX_DRAW_BUFFERS + 1;

class ClearValuesArray final
{
  public:
    ClearValuesArray();
    ~ClearValuesArray();

    ClearValuesArray(const ClearValuesArray &other);
    ClearValuesArray &operator=(const ClearValuesArray &rhs);

    void store(uint32_t index, VkImageAspectFlags aspectFlags, const VkClearValue &clearValue);

    void reset(size_t index)
    {
        mValues[index] = {};
        mEnabled.reset(index);
    }

    bool test(size_t index) const { return mEnabled.test(index); }
    bool testDepth() const { return mEnabled.test(kClearValueDepthIndex); }
    bool testStencil() const { return mEnabled.test(kClearValueStencilIndex); }

    const VkClearValue &operator[](size_t index) const { return mValues[index]; }

    float getDepthValue() const { return mValues[kClearValueDepthIndex].depthStencil.depth; }
    uint32_t getStencilValue() const
    {
        return mValues[kClearValueStencilIndex].depthStencil.stencil;
    }

    const VkClearValue *data() const { return mValues.data(); }
    bool empty() const { return mEnabled.none(); }

    gl::DrawBufferMask getEnabledColorAttachmentsMask() const
    {
        return gl::DrawBufferMask(mEnabled.to_ulong());
    }

  private:
    gl::AttachmentArray<VkClearValue> mValues;
    gl::AttachmentsMask mEnabled;
};
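
// Example usage (illustrative): recording a color clear for attachment 0.
//
//     ClearValuesArray clearValues;
//     VkClearValue clearValue     = {};
//     clearValue.color.float32[3] = 1.0f;  // opaque black
//     clearValues.store(0, VK_IMAGE_ASPECT_COLOR_BIT, clearValue);
//     if (clearValues.test(0))
//     {
//         // clearValues[0] can be passed to the render pass begin info.
//     }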
}  // namespace vk

#if !defined(ANGLE_SHARED_LIBVULKAN)
// Lazily load entry points for each extension as necessary.
void InitDebugUtilsEXTFunctions(VkInstance instance);
void InitDebugReportEXTFunctions(VkInstance instance);
void InitGetPhysicalDeviceProperties2KHRFunctions(VkInstance instance);
void InitTransformFeedbackEXTFunctions(VkDevice device);

#    if defined(ANGLE_PLATFORM_FUCHSIA)
// VK_FUCHSIA_imagepipe_surface
void InitImagePipeSurfaceFUCHSIAFunctions(VkInstance instance);
#    endif

#    if defined(ANGLE_PLATFORM_ANDROID)
// VK_ANDROID_external_memory_android_hardware_buffer
void InitExternalMemoryHardwareBufferANDROIDFunctions(VkInstance instance);
#    endif

#    if defined(ANGLE_PLATFORM_GGP)
// VK_GGP_stream_descriptor_surface
void InitGGPStreamDescriptorSurfaceFunctions(VkInstance instance);
#    endif  // defined(ANGLE_PLATFORM_GGP)

// VK_KHR_external_semaphore_fd
void InitExternalSemaphoreFdFunctions(VkInstance instance);

// VK_EXT_external_memory_host
void InitExternalMemoryHostFunctions(VkInstance instance);

// VK_KHR_external_fence_capabilities
void InitExternalFenceCapabilitiesFunctions(VkInstance instance);

// VK_KHR_external_fence_fd
void InitExternalFenceFdFunctions(VkInstance instance);

// VK_KHR_external_semaphore_capabilities
void InitExternalSemaphoreCapabilitiesFunctions(VkInstance instance);

#endif  // !defined(ANGLE_SHARED_LIBVULKAN)

namespace gl_vk
{
VkRect2D GetRect(const gl::Rectangle &source);
VkFilter GetFilter(const GLenum filter);
VkSamplerMipmapMode GetSamplerMipmapMode(const GLenum filter);
VkSamplerAddressMode GetSamplerAddressMode(const GLenum wrap);
VkPrimitiveTopology GetPrimitiveTopology(gl::PrimitiveMode mode);
VkCullModeFlagBits GetCullMode(const gl::RasterizerState &rasterState);
VkFrontFace GetFrontFace(GLenum frontFace, bool invertCullFace);
VkSampleCountFlagBits GetSamples(GLint sampleCount);
VkComponentSwizzle GetSwizzle(const GLenum swizzle);
VkCompareOp GetCompareOp(const GLenum compareFunc);

constexpr gl::ShaderMap<VkShaderStageFlagBits> kShaderStageMap = {
    {gl::ShaderType::Vertex, VK_SHADER_STAGE_VERTEX_BIT},
    {gl::ShaderType::Fragment, VK_SHADER_STAGE_FRAGMENT_BIT},
    {gl::ShaderType::Geometry, VK_SHADER_STAGE_GEOMETRY_BIT},
    {gl::ShaderType::Compute, VK_SHADER_STAGE_COMPUTE_BIT},
};

void GetOffset(const gl::Offset &glOffset, VkOffset3D *vkOffset);
void GetExtent(const gl::Extents &glExtent, VkExtent3D *vkExtent);
VkImageType GetImageType(gl::TextureType textureType);
VkImageViewType GetImageViewType(gl::TextureType textureType);
VkColorComponentFlags GetColorComponentFlags(bool red, bool green, bool blue, bool alpha);
VkShaderStageFlags GetShaderStageFlags(gl::ShaderBitSet activeShaders);

void GetViewport(const gl::Rectangle &viewport,
                 float nearPlane,
                 float farPlane,
                 bool invertViewport,
                 GLint renderAreaHeight,
                 VkViewport *viewportOut);

void GetExtentsAndLayerCount(gl::TextureType textureType,
                             const gl::Extents &extents,
                             VkExtent3D *extentsOut,
                             uint32_t *layerCountOut);
}  // namespace gl_vk

namespace vk_gl
{
// The Vulkan back-end will not support a sample count of 1, because of a Vulkan specification
// restriction:
//
//   "If the image was created with VkImageCreateInfo::samples equal to VK_SAMPLE_COUNT_1_BIT,
//    the instruction must have MS = 0."
//
// This restriction was tracked in http://anglebug.com/4196 and the Khronos-private Vulkan
// specification issue https://gitlab.khronos.org/vulkan/vulkan/issues/1925.
//
// In addition, the Vulkan back-end will not support sample counts of 32 or 64, since there are
// no standard sample locations for those sample counts.
constexpr unsigned int kSupportedSampleCounts = (VK_SAMPLE_COUNT_2_BIT | VK_SAMPLE_COUNT_4_BIT |
                                                 VK_SAMPLE_COUNT_8_BIT | VK_SAMPLE_COUNT_16_BIT);

// Find set bits in |sampleCounts| and add the corresponding sample counts to |outSet|.
void AddSampleCounts(VkSampleCountFlags sampleCounts, gl::SupportedSampleSet *outSet);
// Return the maximum sample count with a bit set in |sampleCounts|.
GLuint GetMaxSampleCount(VkSampleCountFlags sampleCounts);
// Return a supported sample count that's at least as large as the requested one; e.g. a request
// for 3 samples returns 4.
GLuint GetSampleCount(VkSampleCountFlags supportedCounts, GLuint requestedCount);
}  // namespace vk_gl

}  // namespace rx

// Checks a Vulkan result. On failure, reports the error through the context and returns
// angle::Result::Stop from the calling function.
#define ANGLE_VK_TRY(context, command)                                                 \
    do                                                                                 \
    {                                                                                  \
        auto ANGLE_LOCAL_VAR = command;                                                \
        if (ANGLE_UNLIKELY(ANGLE_LOCAL_VAR != VK_SUCCESS))                             \
        {                                                                              \
            context->handleError(ANGLE_LOCAL_VAR, __FILE__, ANGLE_FUNCTION, __LINE__); \
            return angle::Result::Stop;                                                \
        }                                                                              \
    } while (0)
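
// Example usage (illustrative; the surrounding function and |mCommandBuffer| are placeholders):
//
//     angle::Result ContextVk::endCommands()
//     {
//         ANGLE_VK_TRY(this, vkEndCommandBuffer(mCommandBuffer.getHandle()));
//         return angle::Result::Continue;
//     }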

#define ANGLE_VK_CHECK(context, test, error) ANGLE_VK_TRY(context, test ? VK_SUCCESS : error)

#define ANGLE_VK_CHECK_MATH(context, result) \
    ANGLE_VK_CHECK(context, result, VK_ERROR_VALIDATION_FAILED_EXT)

#define ANGLE_VK_CHECK_ALLOC(context, result) \
    ANGLE_VK_CHECK(context, result, VK_ERROR_OUT_OF_HOST_MEMORY)

#define ANGLE_VK_UNREACHABLE(context) \
    UNREACHABLE();                    \
    ANGLE_VK_CHECK(context, false, VK_ERROR_FEATURE_NOT_PRESENT)

#endif  // LIBANGLE_RENDERER_VULKAN_VK_UTILS_H_