/*
 * Copyright 2022 Google LLC
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef skgpu_VulkanUtilsPriv_DEFINED
#define skgpu_VulkanUtilsPriv_DEFINED

#include "include/core/SkColor.h"
#include "include/core/SkRefCnt.h"
#include "include/gpu/vk/VulkanTypes.h"
#include "include/private/base/SkAssert.h"
#include "include/private/gpu/vk/SkiaVulkan.h"
#include "src/gpu/SkSLToBackend.h"
#include "src/sksl/codegen/SkSLSPIRVCodeGenerator.h"

#ifdef SK_BUILD_FOR_ANDROID
#include <android/hardware_buffer.h>
#endif

#include <cstddef>
#include <cstdint>
#include <string>

class SkStream;
class SkWStream;

namespace SkSL {

enum class ProgramKind : int8_t;
struct ProgramInterface;
struct ProgramSettings;
struct ShaderCaps;

} // namespace SkSL

namespace skgpu {

class ShaderErrorHandler;
struct VulkanInterface;
struct VulkanBackendContext;
class VulkanExtensions;

inline bool SkSLToSPIRV(const SkSL::ShaderCaps* caps,
                        const std::string& sksl,
                        SkSL::ProgramKind programKind,
                        const SkSL::ProgramSettings& settings,
                        std::string* spirv,
                        SkSL::ProgramInterface* outInterface,
                        ShaderErrorHandler* errorHandler) {
    return SkSLToBackend(caps, &SkSL::ToSPIRV, /*backendLabel=*/nullptr,
                         sksl, programKind, settings, spirv, outInterface, errorHandler);
}
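
// Usage sketch (illustrative only; "caps", "settings", and "errorHandler" are assumed
// to come from the caller's compilation pipeline and are not defined in this header):
//
//     std::string spirv;
//     SkSL::ProgramInterface interface;
//     if (!SkSLToSPIRV(caps, skslSource, SkSL::ProgramKind::kFragment, settings,
//                      &spirv, &interface, errorHandler)) {
//         // Compilation failed; errorHandler has already been invoked.
//     }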

static constexpr uint32_t VkFormatChannels(VkFormat vkFormat) {
    switch (vkFormat) {
        case VK_FORMAT_R8G8B8A8_UNORM:           return kRGBA_SkColorChannelFlags;
        case VK_FORMAT_R8_UNORM:                 return kRed_SkColorChannelFlag;
        case VK_FORMAT_B8G8R8A8_UNORM:           return kRGBA_SkColorChannelFlags;
        case VK_FORMAT_R5G6B5_UNORM_PACK16:      return kRGB_SkColorChannelFlags;
        case VK_FORMAT_B5G6R5_UNORM_PACK16:      return kRGB_SkColorChannelFlags;
        case VK_FORMAT_R16G16B16A16_SFLOAT:      return kRGBA_SkColorChannelFlags;
        case VK_FORMAT_R16_SFLOAT:               return kRed_SkColorChannelFlag;
        case VK_FORMAT_R8G8B8_UNORM:             return kRGB_SkColorChannelFlags;
        case VK_FORMAT_R8G8_UNORM:               return kRG_SkColorChannelFlags;
        case VK_FORMAT_A2B10G10R10_UNORM_PACK32: return kRGBA_SkColorChannelFlags;
        case VK_FORMAT_A2R10G10B10_UNORM_PACK32: return kRGBA_SkColorChannelFlags;
        case VK_FORMAT_B4G4R4A4_UNORM_PACK16:    return kRGBA_SkColorChannelFlags;
        case VK_FORMAT_R4G4B4A4_UNORM_PACK16:    return kRGBA_SkColorChannelFlags;
        case VK_FORMAT_R8G8B8A8_SRGB:            return kRGBA_SkColorChannelFlags;
        case VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK:  return kRGB_SkColorChannelFlags;
        case VK_FORMAT_BC1_RGB_UNORM_BLOCK:      return kRGB_SkColorChannelFlags;
        case VK_FORMAT_BC1_RGBA_UNORM_BLOCK:     return kRGBA_SkColorChannelFlags;
        case VK_FORMAT_R16_UNORM:                return kRed_SkColorChannelFlag;
        case VK_FORMAT_R16G16_UNORM:             return kRG_SkColorChannelFlags;
        case VK_FORMAT_R16G16B16A16_UNORM:       return kRGBA_SkColorChannelFlags;
        case VK_FORMAT_R16G16_SFLOAT:            return kRG_SkColorChannelFlags;
        case VK_FORMAT_S8_UINT:                  return 0;
        case VK_FORMAT_D16_UNORM:                return 0;
        case VK_FORMAT_D32_SFLOAT:               return 0;
        case VK_FORMAT_D24_UNORM_S8_UINT:        return 0;
        case VK_FORMAT_D32_SFLOAT_S8_UINT:       return 0;
        default:                                 return 0;
    }
}
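
// Example (a sketch): the returned mask is built from the SkColorChannelFlag bits
// declared in SkColor.h, so a caller can test for a particular channel, e.g.:
//
//     bool hasGreen = (VkFormatChannels(vkFormat) & kGreen_SkColorChannelFlag) != 0;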

static constexpr size_t VkFormatBytesPerBlock(VkFormat vkFormat) {
    switch (vkFormat) {
        case VK_FORMAT_R8G8B8A8_UNORM:            return 4;
        case VK_FORMAT_R8_UNORM:                  return 1;
        case VK_FORMAT_B8G8R8A8_UNORM:            return 4;
        case VK_FORMAT_R5G6B5_UNORM_PACK16:       return 2;
        case VK_FORMAT_B5G6R5_UNORM_PACK16:       return 2;
        case VK_FORMAT_R16G16B16A16_SFLOAT:       return 8;
        case VK_FORMAT_R16_SFLOAT:                return 2;
        case VK_FORMAT_R8G8B8_UNORM:              return 3;
        case VK_FORMAT_R8G8_UNORM:                return 2;
        case VK_FORMAT_A2B10G10R10_UNORM_PACK32:  return 4;
        case VK_FORMAT_A2R10G10B10_UNORM_PACK32:  return 4;
        case VK_FORMAT_B4G4R4A4_UNORM_PACK16:     return 2;
        case VK_FORMAT_R4G4B4A4_UNORM_PACK16:     return 2;
        case VK_FORMAT_R8G8B8A8_SRGB:             return 4;
        case VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK:   return 8;
        case VK_FORMAT_BC1_RGB_UNORM_BLOCK:       return 8;
        case VK_FORMAT_BC1_RGBA_UNORM_BLOCK:      return 8;
        case VK_FORMAT_R16_UNORM:                 return 2;
        case VK_FORMAT_R16G16_UNORM:              return 4;
        case VK_FORMAT_R16G16B16A16_UNORM:        return 8;
        case VK_FORMAT_R16G16_SFLOAT:             return 4;
        // We are currently overestimating this value for use in GPU size calculations,
        // even though the actual size is probably less. We should instead treat planar
        // formats like compressed textures, which go through their own special query
        // for calculating size.
        case VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM:                 return 3;
        case VK_FORMAT_G8_B8R8_2PLANE_420_UNORM:                  return 3;
        case VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16: return 6;
        case VK_FORMAT_S8_UINT:                   return 1;
        case VK_FORMAT_D16_UNORM:                 return 2;
        case VK_FORMAT_D32_SFLOAT:                return 4;
        case VK_FORMAT_D24_UNORM_S8_UINT:         return 4;
        case VK_FORMAT_D32_SFLOAT_S8_UINT:        return 8;

        default:                                  return 0;
    }
}
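
// Example (a sketch; valid only for non-compressed, single-plane formats, where a
// "block" is one pixel): the tight row size of an image is
//
//     size_t rowBytes = width * VkFormatBytesPerBlock(vkFormat);
//
// e.g. a 256-pixel-wide VK_FORMAT_R16G16B16A16_SFLOAT row occupies 256 * 8 = 2048
// bytes. For the compressed formats the value is bytes per 4x4 block, so this
// arithmetic does not apply.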

static constexpr int VkFormatStencilBits(VkFormat format) {
    switch (format) {
        case VK_FORMAT_S8_UINT:
        case VK_FORMAT_D24_UNORM_S8_UINT:
        case VK_FORMAT_D32_SFLOAT_S8_UINT:
            return 8;
        default:
            return 0;
    }
}

static constexpr bool VkFormatNeedsYcbcrSampler(VkFormat format) {
    return format == VK_FORMAT_G8_B8R8_2PLANE_420_UNORM ||
           format == VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM ||
           format == VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16;
}

static constexpr bool SampleCountToVkSampleCount(uint32_t samples,
                                                 VkSampleCountFlagBits* vkSamples) {
    SkASSERT(samples >= 1);
    switch (samples) {
        case 1:
            *vkSamples = VK_SAMPLE_COUNT_1_BIT;
            return true;
        case 2:
            *vkSamples = VK_SAMPLE_COUNT_2_BIT;
            return true;
        case 4:
            *vkSamples = VK_SAMPLE_COUNT_4_BIT;
            return true;
        case 8:
            *vkSamples = VK_SAMPLE_COUNT_8_BIT;
            return true;
        case 16:
            *vkSamples = VK_SAMPLE_COUNT_16_BIT;
            return true;
        default:
            return false;
    }
}
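
// Usage sketch (hypothetical caller): map a requested MSAA count onto the Vulkan
// enum, falling back to no multisampling when the count is unsupported:
//
//     VkSampleCountFlagBits vkSamples;
//     if (!SampleCountToVkSampleCount(requestedSampleCount, &vkSamples)) {
//         vkSamples = VK_SAMPLE_COUNT_1_BIT;
//     }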

/**
 * Returns true if the format is compressed.
 */
static constexpr bool VkFormatIsCompressed(VkFormat vkFormat) {
    switch (vkFormat) {
        case VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK:
        case VK_FORMAT_BC1_RGB_UNORM_BLOCK:
        case VK_FORMAT_BC1_RGBA_UNORM_BLOCK:
            return true;
        default:
            return false;
    }
    SkUNREACHABLE;
}

/**
 * Returns a pointer to the requested extension feature struct, or nullptr if it is not present.
 */
template <typename T>
T* GetExtensionFeatureStruct(const VkPhysicalDeviceFeatures2& features, VkStructureType type) {
    // All Vulkan structs that could be part of the features chain start with the structure
    // type followed by the pNext pointer. We cast to a CommonVulkanHeader so that we can
    // read the pNext field and walk to the next struct in the chain.
    struct CommonVulkanHeader {
        VkStructureType sType;
        void* pNext;
    };

    void* pNext = features.pNext;
    while (pNext) {
        CommonVulkanHeader* header = static_cast<CommonVulkanHeader*>(pNext);
        if (header->sType == type) {
            return static_cast<T*>(pNext);
        }
        pNext = header->pNext;
    }
    return nullptr;
}
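
// Usage sketch (assumes the caller chained the struct into features.pNext before
// calling vkGetPhysicalDeviceFeatures2):
//
//     auto* ycbcr = GetExtensionFeatureStruct<VkPhysicalDeviceSamplerYcbcrConversionFeatures>(
//             features, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES);
//     if (ycbcr && ycbcr->samplerYcbcrConversion) {
//         // The device supports sampler YCbCr conversion.
//     }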

/**
 * Populates the given VkSamplerYcbcrConversionCreateInfo based on the passed
 * VulkanYcbcrConversionInfo.
 */
void SetupSamplerYcbcrConversionInfo(VkSamplerYcbcrConversionCreateInfo* outInfo,
                                     const VulkanYcbcrConversionInfo& conversionInfo);

static constexpr const char* VkFormatToStr(VkFormat vkFormat) {
    switch (vkFormat) {
        case VK_FORMAT_R8G8B8A8_UNORM:           return "R8G8B8A8_UNORM";
        case VK_FORMAT_R8_UNORM:                 return "R8_UNORM";
        case VK_FORMAT_B8G8R8A8_UNORM:           return "B8G8R8A8_UNORM";
        case VK_FORMAT_R5G6B5_UNORM_PACK16:      return "R5G6B5_UNORM_PACK16";
        case VK_FORMAT_B5G6R5_UNORM_PACK16:      return "B5G6R5_UNORM_PACK16";
        case VK_FORMAT_R16G16B16A16_SFLOAT:      return "R16G16B16A16_SFLOAT";
        case VK_FORMAT_R16_SFLOAT:               return "R16_SFLOAT";
        case VK_FORMAT_R8G8B8_UNORM:             return "R8G8B8_UNORM";
        case VK_FORMAT_R8G8_UNORM:               return "R8G8_UNORM";
        case VK_FORMAT_A2B10G10R10_UNORM_PACK32: return "A2B10G10R10_UNORM_PACK32";
        case VK_FORMAT_A2R10G10B10_UNORM_PACK32: return "A2R10G10B10_UNORM_PACK32";
        case VK_FORMAT_B4G4R4A4_UNORM_PACK16:    return "B4G4R4A4_UNORM_PACK16";
        case VK_FORMAT_R4G4B4A4_UNORM_PACK16:    return "R4G4B4A4_UNORM_PACK16";
        case VK_FORMAT_R32G32B32A32_SFLOAT:      return "R32G32B32A32_SFLOAT";
        case VK_FORMAT_R8G8B8A8_SRGB:            return "R8G8B8A8_SRGB";
        case VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK:  return "ETC2_R8G8B8_UNORM_BLOCK";
        case VK_FORMAT_BC1_RGB_UNORM_BLOCK:      return "BC1_RGB_UNORM_BLOCK";
        case VK_FORMAT_BC1_RGBA_UNORM_BLOCK:     return "BC1_RGBA_UNORM_BLOCK";
        case VK_FORMAT_R16_UNORM:                return "R16_UNORM";
        case VK_FORMAT_R16G16_UNORM:             return "R16G16_UNORM";
        case VK_FORMAT_R16G16B16A16_UNORM:       return "R16G16B16A16_UNORM";
        case VK_FORMAT_R16G16_SFLOAT:            return "R16G16_SFLOAT";
        case VK_FORMAT_S8_UINT:                  return "S8_UINT";
        case VK_FORMAT_D16_UNORM:                return "D16_UNORM";
        case VK_FORMAT_D32_SFLOAT:               return "D32_SFLOAT";
        case VK_FORMAT_D24_UNORM_S8_UINT:        return "D24_UNORM_S8_UINT";
        case VK_FORMAT_D32_SFLOAT_S8_UINT:       return "D32_SFLOAT_S8_UINT";

        default:                                 return "Unknown";
    }
}

[[nodiscard]] bool SerializeVkYCbCrInfo(SkWStream*, const VulkanYcbcrConversionInfo&);
[[nodiscard]] bool DeserializeVkYCbCrInfo(SkStream*, VulkanYcbcrConversionInfo* out);

#ifdef SK_BUILD_FOR_ANDROID
/**
 * Vulkan AHardwareBuffer utility functions shared between Graphite and Ganesh.
 */
void GetYcbcrConversionInfoFromFormatProps(
        VulkanYcbcrConversionInfo* outConversionInfo,
        const VkAndroidHardwareBufferFormatPropertiesANDROID& formatProps);

bool GetAHardwareBufferProperties(
        VkAndroidHardwareBufferFormatPropertiesANDROID* outHwbFormatProps,
        VkAndroidHardwareBufferPropertiesANDROID* outHwbProps,
        const skgpu::VulkanInterface*,
        const AHardwareBuffer*,
        VkDevice);

bool AllocateAndBindImageMemory(skgpu::VulkanAlloc* outVulkanAlloc,
                                VkImage,
                                const VkPhysicalDeviceMemoryProperties2&,
                                const VkAndroidHardwareBufferPropertiesANDROID&,
                                AHardwareBuffer*,
                                const skgpu::VulkanInterface*,
                                VkDevice);

#endif // SK_BUILD_FOR_ANDROID

/**
 * Calls faultProc with faultContext; passes debug info if VK_EXT_device_fault is supported
 * and enabled.
 *
 * Note: must only be called *after* receiving VK_ERROR_DEVICE_LOST.
 */
void InvokeDeviceLostCallback(const skgpu::VulkanInterface* vulkanInterface,
                              VkDevice vkDevice,
                              skgpu::VulkanDeviceLostContext faultContext,
                              skgpu::VulkanDeviceLostProc faultProc,
                              bool supportsDeviceFaultInfoExtension);
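
// Usage sketch (hypothetical; "deviceLostContext" and "deviceLostProc" would come
// from the client-supplied VulkanBackendContext): forward a device-lost fault to
// the client after a Vulkan call fails:
//
//     if (result == VK_ERROR_DEVICE_LOST) {
//         InvokeDeviceLostCallback(interface, device, deviceLostContext,
//                                  deviceLostProc, supportsDeviceFaultInfo);
//     }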

sk_sp<skgpu::VulkanInterface> MakeInterface(const skgpu::VulkanBackendContext&,
                                            const skgpu::VulkanExtensions* extOverride,
                                            uint32_t* physDevVersionOut,
                                            uint32_t* instanceVersionOut);

} // namespace skgpu

#endif // skgpu_VulkanUtilsPriv_DEFINED