//
// Copyright 2016 The ANGLE Project Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// vk_utils:
//    Helper functions for the Vulkan Renderer.
//

#ifndef LIBANGLE_RENDERER_VULKAN_VK_UTILS_H_
#define LIBANGLE_RENDERER_VULKAN_VK_UTILS_H_

#include <limits>
#include <vector>

#include "GLSLANG/ShaderLang.h"
#include "common/FixedVector.h"
#include "common/Optional.h"
#include "common/PackedEnums.h"
#include "common/debug.h"
#include "libANGLE/Error.h"
#include "libANGLE/Observer.h"
#include "libANGLE/renderer/serial_utils.h"
#include "libANGLE/renderer/vulkan/SecondaryCommandBuffer.h"
#include "libANGLE/renderer/vulkan/vk_wrapper.h"
#include "vulkan/vulkan_fuchsia_ext.h"

#define ANGLE_GL_OBJECTS_X(PROC) \
    PROC(Buffer)                 \
    PROC(Context)                \
    PROC(Framebuffer)            \
    PROC(MemoryObject)           \
    PROC(Query)                  \
    PROC(Overlay)                \
    PROC(Program)                \
    PROC(ProgramPipeline)        \
    PROC(Sampler)                \
    PROC(Semaphore)              \
    PROC(Texture)                \
    PROC(TransformFeedback)      \
    PROC(VertexArray)

#define ANGLE_PRE_DECLARE_OBJECT(OBJ) class OBJ;

namespace egl
{
class Display;
class Image;
}  // namespace egl

namespace gl
{
struct Box;
class DummyOverlay;
struct Extents;
struct RasterizerState;
struct Rectangle;
class State;
struct SwizzleState;
struct VertexAttribute;
class VertexBinding;

ANGLE_GL_OBJECTS_X(ANGLE_PRE_DECLARE_OBJECT)
}  // namespace gl

#define ANGLE_PRE_DECLARE_VK_OBJECT(OBJ) class OBJ##Vk;

namespace rx
{
class CommandGraphResource;
class DisplayVk;
class ImageVk;
class RenderTargetVk;
class RendererVk;
class RenderPassCache;
}  // namespace rx

namespace angle
{
egl::Error ToEGL(Result result, rx::DisplayVk *displayVk, EGLint errorCode);
}  // namespace angle

namespace rx
{
ANGLE_GL_OBJECTS_X(ANGLE_PRE_DECLARE_VK_OBJECT)

const char *VulkanResultString(VkResult result);

constexpr size_t kMaxVulkanLayers = 20;
using VulkanLayerVector           = angle::FixedVector<const char *, kMaxVulkanLayers>;

// Verify that validation layers are available.
bool GetAvailableValidationLayers(const std::vector<VkLayerProperties> &layerProps,
                                  bool mustHaveLayers,
                                  VulkanLayerVector *enabledLayerNames);

enum class TextureDimension
{
    TEX_2D,
    TEX_CUBE,
    TEX_3D,
    TEX_2D_ARRAY,
};

namespace vk
{
struct Format;

// Prepend ptr to the pNext chain at chainStart
template <typename VulkanStruct1, typename VulkanStruct2>
void AddToPNextChain(VulkanStruct1 *chainStart, VulkanStruct2 *ptr)
{
    ASSERT(ptr->pNext == nullptr);

    VkBaseOutStructure *localPtr = reinterpret_cast<VkBaseOutStructure *>(chainStart);
    ptr->pNext                   = localPtr->pNext;
    localPtr->pNext              = reinterpret_cast<VkBaseOutStructure *>(ptr);
}
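// A minimal usage sketch: chaining an extension feature struct onto VkPhysicalDeviceFeatures2
// before a query. The specific feature struct is illustrative; any pair of Vulkan structs with
// sType/pNext members works.
//
//     VkPhysicalDeviceFeatures2 features = {};
//     features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
//
//     VkPhysicalDeviceIndexTypeUint8FeaturesEXT indexTypeUint8Features = {};
//     indexTypeUint8Features.sType =
//         VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT;
//
//     // |indexTypeUint8Features| becomes the first link of |features|'s pNext chain.
//     vk::AddToPNextChain(&features, &indexTypeUint8Features);
//     vkGetPhysicalDeviceFeatures2(physicalDevice, &features);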
// Abstracts error handling. Implemented by both ContextVk for GL and DisplayVk for EGL errors.
class Context : angle::NonCopyable
{
  public:
    Context(RendererVk *renderer);
    virtual ~Context();

    virtual void handleError(VkResult result,
                             const char *file,
                             const char *function,
                             unsigned int line) = 0;
    VkDevice getDevice() const;
    RendererVk *getRenderer() const { return mRenderer; }

    // This is a special override needed so we can determine if we need to initialize images.
    // It corresponds to the EGL or GL extensions depending on the vk::Context type.
    virtual bool isRobustResourceInitEnabled() const = 0;

  protected:
    RendererVk *const mRenderer;
};

#if ANGLE_USE_CUSTOM_VULKAN_CMD_BUFFERS
using CommandBuffer = priv::SecondaryCommandBuffer;
#else
using CommandBuffer = priv::CommandBuffer;
#endif

using PrimaryCommandBuffer = priv::CommandBuffer;

VkImageAspectFlags GetDepthStencilAspectFlags(const angle::Format &format);
VkImageAspectFlags GetFormatAspectFlags(const angle::Format &format);

template <typename T>
struct ImplTypeHelper;

// clang-format off
#define ANGLE_IMPL_TYPE_HELPER_GL(OBJ) \
template<>                             \
struct ImplTypeHelper<gl::OBJ>         \
{                                      \
    using ImplType = OBJ##Vk;          \
};
// clang-format on

ANGLE_GL_OBJECTS_X(ANGLE_IMPL_TYPE_HELPER_GL)

template <>
struct ImplTypeHelper<gl::DummyOverlay>
{
    using ImplType = OverlayVk;
};

template <>
struct ImplTypeHelper<egl::Display>
{
    using ImplType = DisplayVk;
};

template <>
struct ImplTypeHelper<egl::Image>
{
    using ImplType = ImageVk;
};

template <typename T>
using GetImplType = typename ImplTypeHelper<T>::ImplType;

template <typename T>
GetImplType<T> *GetImpl(const T *glObject)
{
    return GetImplAs<GetImplType<T>>(glObject);
}

template <>
inline OverlayVk *GetImpl(const gl::DummyOverlay *glObject)
{
    return nullptr;
}

template <typename ObjT>
class ObjectAndSerial final : angle::NonCopyable
{
  public:
    ObjectAndSerial() {}

    ObjectAndSerial(ObjT &&object, Serial serial) : mObject(std::move(object)), mSerial(serial) {}

    ObjectAndSerial(ObjectAndSerial &&other)
        : mObject(std::move(other.mObject)), mSerial(std::move(other.mSerial))
    {}
    ObjectAndSerial &operator=(ObjectAndSerial &&other)
    {
        mObject = std::move(other.mObject);
        mSerial = std::move(other.mSerial);
        return *this;
    }

    Serial getSerial() const { return mSerial; }
    void updateSerial(Serial newSerial) { mSerial = newSerial; }

    const ObjT &get() const { return mObject; }
    ObjT &get() { return mObject; }

    bool valid() const { return mObject.valid(); }

    void destroy(VkDevice device)
    {
        mObject.destroy(device);
        mSerial = Serial();
    }

  private:
    ObjT mObject;
    Serial mSerial;
};

// Reference to a deleted object. The object is due to be destroyed at some point in the future.
// |mHandleType| determines the type of the object and which destroy function should be called.
class GarbageObject
{
  public:
    GarbageObject();
    GarbageObject(GarbageObject &&other);
    GarbageObject &operator=(GarbageObject &&rhs);

    bool valid() const { return mHandle != VK_NULL_HANDLE; }
    void destroy(RendererVk *renderer);

    template <typename DerivedT, typename HandleT>
    static GarbageObject Get(WrappedObject<DerivedT, HandleT> *object)
    {
        // Using c-style cast here to avoid conditional compile for MSVC 32-bit
        // which fails to compile with reinterpret_cast, requiring static_cast.
        return GarbageObject(HandleTypeHelper<DerivedT>::kHandleType,
                             (GarbageHandle)(object->release()));
    }

  private:
    VK_DEFINE_NON_DISPATCHABLE_HANDLE(GarbageHandle)
    GarbageObject(HandleType handleType, GarbageHandle handle);

    HandleType mHandleType;
    GarbageHandle mHandle;
};

template <typename T>
GarbageObject GetGarbage(T *obj)
{
    return GarbageObject::Get(obj);
}

// A list of garbage objects. Has no object lifetime information.
using GarbageList = std::vector<GarbageObject>;

// A list of garbage objects and the associated serial after which the objects can be destroyed.
using GarbageAndSerial = ObjectAndSerial<GarbageList>;
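// A usage sketch for deferred destruction (the call site is hypothetical; |mSharedGarbage| and
// getCurrentQueueSerial() are illustrative names). GetGarbage() releases the wrapper's handle,
// so the wrapper must not be used afterwards:
//
//     vk::GarbageList garbage;
//     garbage.emplace_back(vk::GetGarbage(&mBuffer));
//     garbage.emplace_back(vk::GetGarbage(&mDeviceMemory));
//     // Destroy once the GPU has passed this serial.
//     mSharedGarbage.emplace_back(std::move(garbage), getCurrentQueueSerial());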
// Houses multiple lists of garbage objects. Each sub-list has a different lifetime. They should
// be sorted such that later-living garbage is ordered later in the list.
using GarbageQueue = std::vector<GarbageAndSerial>;

class MemoryProperties final : angle::NonCopyable
{
  public:
    MemoryProperties();

    void init(VkPhysicalDevice physicalDevice);
    angle::Result findCompatibleMemoryIndex(Context *context,
                                            const VkMemoryRequirements &memoryRequirements,
                                            VkMemoryPropertyFlags requestedMemoryPropertyFlags,
                                            VkMemoryPropertyFlags *memoryPropertyFlagsOut,
                                            uint32_t *indexOut) const;
    void destroy();

    VkDeviceSize getHeapSizeForMemoryType(uint32_t memoryType) const
    {
        uint32_t heapIndex = mMemoryProperties.memoryTypes[memoryType].heapIndex;
        return mMemoryProperties.memoryHeaps[heapIndex].size;
    }

  private:
    VkPhysicalDeviceMemoryProperties mMemoryProperties;
};

// Similar to StagingImage, for Buffers.
class StagingBuffer final : angle::NonCopyable
{
  public:
    StagingBuffer();
    void release(ContextVk *contextVk);
    void collectGarbage(RendererVk *renderer, Serial serial);
    void destroy(RendererVk *renderer);

    angle::Result init(Context *context, VkDeviceSize size, StagingUsage usage);

    Buffer &getBuffer() { return mBuffer; }
    const Buffer &getBuffer() const { return mBuffer; }
    size_t getSize() const { return mSize; }

  private:
    Buffer mBuffer;
    Allocation mAllocation;
    size_t mSize;
};

angle::Result InitMappableAllocation(const vk::Allocator &allocator,
                                     Allocation *allocation,
                                     VkDeviceSize size,
                                     int value,
                                     VkMemoryPropertyFlags memoryPropertyFlags);

angle::Result InitMappableDeviceMemory(vk::Context *context,
                                       vk::DeviceMemory *deviceMemory,
                                       VkDeviceSize size,
                                       int value,
                                       VkMemoryPropertyFlags memoryPropertyFlags);

angle::Result AllocateBufferMemory(Context *context,
                                   VkMemoryPropertyFlags requestedMemoryPropertyFlags,
                                   VkMemoryPropertyFlags *memoryPropertyFlagsOut,
                                   const void *extraAllocationInfo,
                                   Buffer *buffer,
                                   DeviceMemory *deviceMemoryOut,
                                   VkDeviceSize *sizeOut);

angle::Result AllocateImageMemory(Context *context,
                                  VkMemoryPropertyFlags memoryPropertyFlags,
                                  const void *extraAllocationInfo,
                                  Image *image,
                                  DeviceMemory *deviceMemoryOut,
                                  VkDeviceSize *sizeOut);

angle::Result AllocateImageMemoryWithRequirements(Context *context,
                                                  VkMemoryPropertyFlags memoryPropertyFlags,
                                                  const VkMemoryRequirements &memoryRequirements,
                                                  const void *extraAllocationInfo,
                                                  Image *image,
                                                  DeviceMemory *deviceMemoryOut);

using ShaderAndSerial = ObjectAndSerial<ShaderModule>;

angle::Result InitShaderAndSerial(Context *context,
                                  ShaderAndSerial *shaderAndSerial,
                                  const uint32_t *shaderCode,
                                  size_t shaderCodeSize);

gl::TextureType Get2DTextureType(uint32_t layerCount, GLint samples);

enum class RecordingMode
{
    Start,
    Append,
};

// Helper class to handle RAII patterns for initialization. Requires that T have a destroy method
// that takes a VkDevice and returns void.
template <typename T>
class DeviceScoped final : angle::NonCopyable
{
  public:
    DeviceScoped(VkDevice device) : mDevice(device) {}
    ~DeviceScoped() { mVar.destroy(mDevice); }

    const T &get() const { return mVar; }
    T &get() { return mVar; }

    T &&release() { return std::move(mVar); }

  private:
    VkDevice mDevice;
    T mVar;
};
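// A usage sketch for DeviceScoped (assuming vk::Semaphore exposes an init(VkDevice) helper, as
// the wrapper types in vk_wrapper.h generally do; |device| and |mSemaphore| are illustrative).
// If initialization fails and the function returns early, the destructor cleans up the
// partially-built object:
//
//     vk::DeviceScoped<vk::Semaphore> scoped(device);
//     ANGLE_VK_TRY(context, scoped.get().init(device));
//     mSemaphore = scoped.release();  // success: take ownership; no destroy on scope exit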
// Similar to DeviceScoped, but releases objects instead of destroying them. Requires that T have
// a release method that takes a ContextVk * and returns void.
template <typename T>
class ContextScoped final : angle::NonCopyable
{
  public:
    ContextScoped(ContextVk *contextVk) : mContextVk(contextVk) {}
    ~ContextScoped() { mVar.release(mContextVk); }

    const T &get() const { return mVar; }
    T &get() { return mVar; }

    T &&release() { return std::move(mVar); }

  private:
    ContextVk *mContextVk;
    T mVar;
};

template <typename T>
class RendererScoped final : angle::NonCopyable
{
  public:
    RendererScoped(RendererVk *renderer) : mRenderer(renderer) {}
    ~RendererScoped() { mVar.release(mRenderer); }

    const T &get() const { return mVar; }
    T &get() { return mVar; }

    T &&release() { return std::move(mVar); }

  private:
    RendererVk *mRenderer;
    T mVar;
};

// This is a very simple RefCount class that has no autoreleasing. Used in the descriptor set and
// pipeline layout caches.
template <typename T>
class RefCounted : angle::NonCopyable
{
  public:
    RefCounted() : mRefCount(0) {}
    explicit RefCounted(T &&newObject) : mRefCount(0), mObject(std::move(newObject)) {}
    ~RefCounted() { ASSERT(mRefCount == 0 && !mObject.valid()); }

    RefCounted(RefCounted &&copy) : mRefCount(copy.mRefCount), mObject(std::move(copy.mObject))
    {
        ASSERT(this != &copy);
        copy.mRefCount = 0;
    }

    RefCounted &operator=(RefCounted &&rhs)
    {
        std::swap(mRefCount, rhs.mRefCount);
        mObject = std::move(rhs.mObject);
        return *this;
    }

    void addRef()
    {
        ASSERT(mRefCount != std::numeric_limits<uint32_t>::max());
        mRefCount++;
    }

    void releaseRef()
    {
        ASSERT(isReferenced());
        mRefCount--;
    }

    bool isReferenced() const { return mRefCount != 0; }

    T &get() { return mObject; }
    const T &get() const { return mObject; }

  private:
    uint32_t mRefCount;
    T mObject;
};

template <typename T>
class BindingPointer final : angle::NonCopyable
{
  public:
    BindingPointer() : mRefCounted(nullptr) {}

    ~BindingPointer() { reset(); }

    void set(RefCounted<T> *refCounted)
    {
        if (mRefCounted)
        {
            mRefCounted->releaseRef();
        }

        mRefCounted = refCounted;

        if (mRefCounted)
        {
            mRefCounted->addRef();
        }
    }

    void reset() { set(nullptr); }

    T &get() { return mRefCounted->get(); }
    const T &get() const { return mRefCounted->get(); }

    bool valid() const { return mRefCounted != nullptr; }

  private:
    RefCounted<T> *mRefCounted;
};
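// A usage sketch tying RefCounted and BindingPointer together (the cache layout is
// illustrative, not the actual cache code). A cache owns the RefCounted slot; consumers bind to
// it, and the slot's count stays non-zero while any binding references it:
//
//     vk::RefCounted<vk::PipelineLayout> &entry = ...;  // slot owned by a cache
//
//     vk::BindingPointer<vk::PipelineLayout> binding;
//     binding.set(&entry);                               // addRef
//     const vk::PipelineLayout &layout = binding.get();
//     // binding's destructor calls reset(), which releases the reference.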
// Helper class to share ref-counted Vulkan objects. Requires that T have a destroy method
// that takes a VkDevice and returns void.
template <typename T>
class Shared final : angle::NonCopyable
{
  public:
    Shared() : mRefCounted(nullptr) {}
    ~Shared() { ASSERT(mRefCounted == nullptr); }

    Shared(Shared &&other) { *this = std::move(other); }
    Shared &operator=(Shared &&other)
    {
        ASSERT(this != &other);
        mRefCounted       = other.mRefCounted;
        other.mRefCounted = nullptr;
        return *this;
    }

    void set(VkDevice device, RefCounted<T> *refCounted)
    {
        if (mRefCounted)
        {
            mRefCounted->releaseRef();
            if (!mRefCounted->isReferenced())
            {
                mRefCounted->get().destroy(device);
                SafeDelete(mRefCounted);
            }
        }

        mRefCounted = refCounted;

        if (mRefCounted)
        {
            mRefCounted->addRef();
        }
    }

    void assign(VkDevice device, T &&newObject)
    {
        set(device, new RefCounted<T>(std::move(newObject)));
    }

    void copy(VkDevice device, const Shared<T> &other) { set(device, other.mRefCounted); }

    void reset(VkDevice device) { set(device, nullptr); }

    template <typename RecyclerT>
    void resetAndRecycle(RecyclerT *recycler)
    {
        if (mRefCounted)
        {
            mRefCounted->releaseRef();
            if (!mRefCounted->isReferenced())
            {
                ASSERT(mRefCounted->get().valid());
                recycler->recycle(std::move(mRefCounted->get()));
                SafeDelete(mRefCounted);
            }

            mRefCounted = nullptr;
        }
    }

    bool isReferenced() const
    {
        // If reference is zero, the object should have been deleted. I.e. if the object is not
        // nullptr, it should have a reference.
        ASSERT(!mRefCounted || mRefCounted->isReferenced());
        return mRefCounted != nullptr;
    }

    T &get()
    {
        ASSERT(mRefCounted && mRefCounted->isReferenced());
        return mRefCounted->get();
    }
    const T &get() const
    {
        ASSERT(mRefCounted && mRefCounted->isReferenced());
        return mRefCounted->get();
    }

  private:
    RefCounted<T> *mRefCounted;
};

template <typename T>
class Recycler final : angle::NonCopyable
{
  public:
    Recycler() = default;

    void recycle(T &&garbageObject) { mObjectFreeList.emplace_back(std::move(garbageObject)); }

    void fetch(T *outObject)
    {
        ASSERT(!empty());
        *outObject = std::move(mObjectFreeList.back());
        mObjectFreeList.pop_back();
    }

    void destroy(VkDevice device)
    {
        for (T &object : mObjectFreeList)
        {
            object.destroy(device);
        }
    }

    bool empty() const { return mObjectFreeList.empty(); }

  private:
    std::vector<T> mObjectFreeList;
};

using SpecializationConstantBitSet =
    angle::PackedEnumBitSet<sh::vk::SpecializationConstantId, uint32_t>;
static_assert(sizeof(SpecializationConstantBitSet) == sizeof(uint32_t), "Unexpected size");

template <typename T>
using SpecializationConstantMap = angle::PackedEnumMap<sh::vk::SpecializationConstantId, T>;

void MakeDebugUtilsLabel(GLenum source, const char *marker, VkDebugUtilsLabelEXT *label);

constexpr size_t kClearValueDepthIndex   = gl::IMPLEMENTATION_MAX_DRAW_BUFFERS;
constexpr size_t kClearValueStencilIndex = gl::IMPLEMENTATION_MAX_DRAW_BUFFERS + 1;

class ClearValuesArray final
{
  public:
    ClearValuesArray();
    ~ClearValuesArray();

    ClearValuesArray(const ClearValuesArray &other);
    ClearValuesArray &operator=(const ClearValuesArray &rhs);

    void store(uint32_t index, VkImageAspectFlags aspectFlags, const VkClearValue &clearValue);

    void reset(size_t index)
    {
        mValues[index] = {};
        mEnabled.reset(index);
    }

    bool test(size_t index) const { return mEnabled.test(index); }
    bool testDepth() const { return mEnabled.test(kClearValueDepthIndex); }
    bool testStencil() const { return mEnabled.test(kClearValueStencilIndex); }

    const VkClearValue &operator[](size_t index) const { return mValues[index]; }

    float getDepthValue() const { return mValues[kClearValueDepthIndex].depthStencil.depth; }
    uint32_t getStencilValue() const
    {
        return mValues[kClearValueStencilIndex].depthStencil.stencil;
    }

    const VkClearValue *data() const { return mValues.data(); }
    bool empty() const { return mEnabled.none(); }

    gl::DrawBufferMask getEnabledColorAttachmentsMask() const
    {
        return gl::DrawBufferMask(mEnabled.to_ulong());
    }

  private:
    gl::AttachmentArray<VkClearValue> mValues;
    gl::AttachmentsMask mEnabled;
};
}  // namespace vk
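// A usage sketch for vk::Recycler (|mFenceRecycler| and |fenceCreateInfo| are illustrative, and
// the Fence::init signature is assumed to match vk_wrapper.h). fetch() reuses a free-list entry
// instead of creating a fresh object:
//
//     vk::Fence fence;
//     if (!mFenceRecycler.empty())
//     {
//         mFenceRecycler.fetch(&fence);
//     }
//     else
//     {
//         ANGLE_VK_TRY(context, fence.init(device, fenceCreateInfo));
//     }
//     ...
//     mFenceRecycler.recycle(std::move(fence));  // back on the free list for reuse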
#if !defined(ANGLE_SHARED_LIBVULKAN)
// Lazily load entry points for each extension as necessary.
void InitDebugUtilsEXTFunctions(VkInstance instance);
void InitDebugReportEXTFunctions(VkInstance instance);
void InitGetPhysicalDeviceProperties2KHRFunctions(VkInstance instance);
void InitTransformFeedbackEXTFunctions(VkDevice device);

#    if defined(ANGLE_PLATFORM_FUCHSIA)
// VK_FUCHSIA_imagepipe_surface
void InitImagePipeSurfaceFUCHSIAFunctions(VkInstance instance);
#    endif

#    if defined(ANGLE_PLATFORM_ANDROID)
// VK_ANDROID_external_memory_android_hardware_buffer
void InitExternalMemoryHardwareBufferANDROIDFunctions(VkInstance instance);
#    endif

#    if defined(ANGLE_PLATFORM_GGP)
// VK_GGP_stream_descriptor_surface
void InitGGPStreamDescriptorSurfaceFunctions(VkInstance instance);
#    endif  // defined(ANGLE_PLATFORM_GGP)

// VK_KHR_external_semaphore_fd
void InitExternalSemaphoreFdFunctions(VkInstance instance);

// VK_EXT_external_memory_host
void InitExternalMemoryHostFunctions(VkInstance instance);

// VK_KHR_external_fence_capabilities
void InitExternalFenceCapabilitiesFunctions(VkInstance instance);

// VK_KHR_external_fence_fd
void InitExternalFenceFdFunctions(VkInstance instance);

// VK_KHR_external_semaphore_capabilities
void InitExternalSemaphoreCapabilitiesFunctions(VkInstance instance);
#endif  // !defined(ANGLE_SHARED_LIBVULKAN)

namespace gl_vk
{
VkRect2D GetRect(const gl::Rectangle &source);
VkFilter GetFilter(const GLenum filter);
VkSamplerMipmapMode GetSamplerMipmapMode(const GLenum filter);
VkSamplerAddressMode GetSamplerAddressMode(const GLenum wrap);
VkPrimitiveTopology GetPrimitiveTopology(gl::PrimitiveMode mode);
VkCullModeFlagBits GetCullMode(const gl::RasterizerState &rasterState);
VkFrontFace GetFrontFace(GLenum frontFace, bool invertCullFace);
VkSampleCountFlagBits GetSamples(GLint sampleCount);
VkComponentSwizzle GetSwizzle(const GLenum swizzle);
VkCompareOp GetCompareOp(const GLenum compareFunc);

constexpr gl::ShaderMap<VkShaderStageFlagBits> kShaderStageMap = {
    {gl::ShaderType::Vertex, VK_SHADER_STAGE_VERTEX_BIT},
    {gl::ShaderType::Fragment, VK_SHADER_STAGE_FRAGMENT_BIT},
    {gl::ShaderType::Geometry, VK_SHADER_STAGE_GEOMETRY_BIT},
    {gl::ShaderType::Compute, VK_SHADER_STAGE_COMPUTE_BIT},
};

void GetOffset(const gl::Offset &glOffset, VkOffset3D *vkOffset);
void GetExtent(const gl::Extents &glExtent, VkExtent3D *vkExtent);
VkImageType GetImageType(gl::TextureType textureType);
VkImageViewType GetImageViewType(gl::TextureType textureType);
VkColorComponentFlags GetColorComponentFlags(bool red, bool green, bool blue, bool alpha);
VkShaderStageFlags GetShaderStageFlags(gl::ShaderBitSet activeShaders);

void GetViewport(const gl::Rectangle &viewport,
                 float nearPlane,
                 float farPlane,
                 bool invertViewport,
                 GLint renderAreaHeight,
                 VkViewport *viewportOut);

void GetExtentsAndLayerCount(gl::TextureType textureType,
                             const gl::Extents &extents,
                             VkExtent3D *extentsOut,
                             uint32_t *layerCountOut);
}  // namespace gl_vk
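// A usage sketch for the GL-to-Vulkan conversions above (|glState| and |renderAreaHeight| are
// illustrative; the near/far values are placeholders). GetViewport can flip the viewport to
// account for Vulkan's inverted Y convention:
//
//     VkViewport viewport;
//     gl_vk::GetViewport(glState.getViewport(), 0.0f, 1.0f, /*invertViewport=*/true,
//                        renderAreaHeight, &viewport);
//
//     VkPrimitiveTopology topology = gl_vk::GetPrimitiveTopology(gl::PrimitiveMode::Triangles);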
namespace vk_gl
{
// The Vulkan back-end will not support a sample count of 1, because of a Vulkan specification
// restriction:
//
//   If the image was created with VkImageCreateInfo::samples equal to VK_SAMPLE_COUNT_1_BIT, the
//   instruction must: have MS = 0.
//
// This restriction was tracked in http://anglebug.com/4196 and Khronos-private Vulkan
// specification issue https://gitlab.khronos.org/vulkan/vulkan/issues/1925.
//
// In addition, the Vulkan back-end will not support sample counts of 32 or 64, since there are
// no standard sample locations for those sample counts.
constexpr unsigned int kSupportedSampleCounts = (VK_SAMPLE_COUNT_2_BIT | VK_SAMPLE_COUNT_4_BIT |
                                                 VK_SAMPLE_COUNT_8_BIT | VK_SAMPLE_COUNT_16_BIT);

// Find set bits in sampleCounts and add the corresponding sample count to the set.
void AddSampleCounts(VkSampleCountFlags sampleCounts, gl::SupportedSampleSet *outSet);
// Return the maximum sample count with a bit set in |sampleCounts|.
GLuint GetMaxSampleCount(VkSampleCountFlags sampleCounts);
// Return a supported sample count that's at least as large as the requested one.
GLuint GetSampleCount(VkSampleCountFlags supportedCounts, GLuint requestedCount);
}  // namespace vk_gl

}  // namespace rx

#define ANGLE_VK_TRY(context, command)                                                 \
    do                                                                                 \
    {                                                                                  \
        auto ANGLE_LOCAL_VAR = command;                                                \
        if (ANGLE_UNLIKELY(ANGLE_LOCAL_VAR != VK_SUCCESS))                             \
        {                                                                              \
            context->handleError(ANGLE_LOCAL_VAR, __FILE__, ANGLE_FUNCTION, __LINE__); \
            return angle::Result::Stop;                                                \
        }                                                                              \
    } while (0)

#define ANGLE_VK_CHECK(context, test, error) ANGLE_VK_TRY(context, test ? VK_SUCCESS : error)

#define ANGLE_VK_CHECK_MATH(context, result) \
    ANGLE_VK_CHECK(context, result, VK_ERROR_VALIDATION_FAILED_EXT)

#define ANGLE_VK_CHECK_ALLOC(context, result) \
    ANGLE_VK_CHECK(context, result, VK_ERROR_OUT_OF_HOST_MEMORY)

#define ANGLE_VK_UNREACHABLE(context) \
    UNREACHABLE();                    \
    ANGLE_VK_CHECK(context, false, VK_ERROR_FEATURE_NOT_PRESENT)

#endif  // LIBANGLE_RENDERER_VULKAN_VK_UTILS_H_