/*
 * Copyright 2022 Google LLC
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef skgpu_VulkanUtilsPriv_DEFINED
#define skgpu_VulkanUtilsPriv_DEFINED

#include "include/core/SkColor.h"
#include "include/core/SkTextureCompressionType.h"
#include "include/gpu/vk/VulkanTypes.h"
#include "include/private/base/SkAssert.h"
#include "include/private/gpu/vk/SkiaVulkan.h"
#include "src/gpu/PipelineUtils.h"
#include "src/sksl/codegen/SkSLSPIRVCodeGenerator.h"

#ifdef SK_BUILD_FOR_ANDROID
#include <android/hardware_buffer.h>
#endif

#include <cstddef>
#include <cstdint>
#include <string>

namespace SkSL {

enum class ProgramKind : int8_t;
struct ProgramInterface;
struct ProgramSettings;
struct ShaderCaps;

} // namespace SkSL

namespace skgpu {

class ShaderErrorHandler;
struct VulkanInterface;

inline bool SkSLToSPIRV(const SkSL::ShaderCaps* caps,
                        const std::string& sksl,
                        SkSL::ProgramKind programKind,
                        const SkSL::ProgramSettings& settings,
                        std::string* spirv,
                        SkSL::ProgramInterface* outInterface,
                        ShaderErrorHandler* errorHandler) {
    return SkSLToBackend(caps, &SkSL::ToSPIRV, /*backendLabel=*/nullptr,
                         sksl, programKind, settings, spirv, outInterface, errorHandler);
}
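
/*
 * Example (a minimal usage sketch; `shaderCaps`, `skslSource`, `programSettings`, and
 * `errorHandler` are assumed to be supplied by the caller):
 *
 *   std::string spirv;
 *   SkSL::ProgramInterface interface;
 *   if (SkSLToSPIRV(shaderCaps, skslSource, SkSL::ProgramKind::kFragment,
 *                   programSettings, &spirv, &interface, errorHandler)) {
 *       // `spirv` now holds the binary SPIR-V module, stored as a byte string.
 *   }
 */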

static constexpr uint32_t VkFormatChannels(VkFormat vkFormat) {
    switch (vkFormat) {
        case VK_FORMAT_R8G8B8A8_UNORM:           return kRGBA_SkColorChannelFlags;
        case VK_FORMAT_R8_UNORM:                 return kRed_SkColorChannelFlag;
        case VK_FORMAT_B8G8R8A8_UNORM:           return kRGBA_SkColorChannelFlags;
        case VK_FORMAT_R5G6B5_UNORM_PACK16:      return kRGB_SkColorChannelFlags;
        case VK_FORMAT_B5G6R5_UNORM_PACK16:      return kRGB_SkColorChannelFlags;
        case VK_FORMAT_R16G16B16A16_SFLOAT:      return kRGBA_SkColorChannelFlags;
        case VK_FORMAT_R16_SFLOAT:               return kRed_SkColorChannelFlag;
        case VK_FORMAT_R8G8B8_UNORM:             return kRGB_SkColorChannelFlags;
        case VK_FORMAT_R8G8_UNORM:               return kRG_SkColorChannelFlags;
        case VK_FORMAT_A2B10G10R10_UNORM_PACK32: return kRGBA_SkColorChannelFlags;
        case VK_FORMAT_A2R10G10B10_UNORM_PACK32: return kRGBA_SkColorChannelFlags;
        case VK_FORMAT_B4G4R4A4_UNORM_PACK16:    return kRGBA_SkColorChannelFlags;
        case VK_FORMAT_R4G4B4A4_UNORM_PACK16:    return kRGBA_SkColorChannelFlags;
        case VK_FORMAT_R8G8B8A8_SRGB:            return kRGBA_SkColorChannelFlags;
        case VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK:  return kRGB_SkColorChannelFlags;
        case VK_FORMAT_BC1_RGB_UNORM_BLOCK:      return kRGB_SkColorChannelFlags;
        case VK_FORMAT_BC1_RGBA_UNORM_BLOCK:     return kRGBA_SkColorChannelFlags;
        case VK_FORMAT_R16_UNORM:                return kRed_SkColorChannelFlag;
        case VK_FORMAT_R16G16_UNORM:             return kRG_SkColorChannelFlags;
        case VK_FORMAT_R16G16B16A16_UNORM:       return kRGBA_SkColorChannelFlags;
        case VK_FORMAT_R16G16_SFLOAT:            return kRG_SkColorChannelFlags;
        case VK_FORMAT_S8_UINT:                  return 0;
        case VK_FORMAT_D16_UNORM:                return 0;
        case VK_FORMAT_D32_SFLOAT:               return 0;
        case VK_FORMAT_D24_UNORM_S8_UINT:        return 0;
        case VK_FORMAT_D32_SFLOAT_S8_UINT:       return 0;
        default:                                 return 0;
    }
}

static constexpr size_t VkFormatBytesPerBlock(VkFormat vkFormat) {
    switch (vkFormat) {
        case VK_FORMAT_R8G8B8A8_UNORM:           return 4;
        case VK_FORMAT_R8_UNORM:                 return 1;
        case VK_FORMAT_B8G8R8A8_UNORM:           return 4;
        case VK_FORMAT_R5G6B5_UNORM_PACK16:      return 2;
        case VK_FORMAT_B5G6R5_UNORM_PACK16:      return 2;
        case VK_FORMAT_R16G16B16A16_SFLOAT:      return 8;
        case VK_FORMAT_R16_SFLOAT:               return 2;
        case VK_FORMAT_R8G8B8_UNORM:             return 3;
        case VK_FORMAT_R8G8_UNORM:               return 2;
        case VK_FORMAT_A2B10G10R10_UNORM_PACK32: return 4;
        case VK_FORMAT_A2R10G10B10_UNORM_PACK32: return 4;
        case VK_FORMAT_B4G4R4A4_UNORM_PACK16:    return 2;
        case VK_FORMAT_R4G4B4A4_UNORM_PACK16:    return 2;
        case VK_FORMAT_R8G8B8A8_SRGB:            return 4;
        case VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK:  return 8;
        case VK_FORMAT_BC1_RGB_UNORM_BLOCK:      return 8;
        case VK_FORMAT_BC1_RGBA_UNORM_BLOCK:     return 8;
        case VK_FORMAT_R16_UNORM:                return 2;
        case VK_FORMAT_R16G16_UNORM:             return 4;
        case VK_FORMAT_R16G16B16A16_UNORM:       return 8;
        case VK_FORMAT_R16G16_SFLOAT:            return 4;
        // Currently we are overestimating this value for use in GPU size calculations, even
        // though the actual size is probably less. We should instead treat planar formats
        // similarly to compressed textures, which go through their own special query for
        // calculating size.
        case VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM: return 3;
        case VK_FORMAT_G8_B8R8_2PLANE_420_UNORM:  return 3;
        case VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16: return 6;
        case VK_FORMAT_S8_UINT:                  return 1;
        case VK_FORMAT_D16_UNORM:                return 2;
        case VK_FORMAT_D32_SFLOAT:               return 4;
        case VK_FORMAT_D24_UNORM_S8_UINT:        return 4;
        case VK_FORMAT_D32_SFLOAT_S8_UINT:       return 8;
        default:                                 return 0;
    }
}

static constexpr SkTextureCompressionType VkFormatToCompressionType(VkFormat vkFormat) {
    switch (vkFormat) {
        case VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK: return SkTextureCompressionType::kETC2_RGB8_UNORM;
        case VK_FORMAT_BC1_RGB_UNORM_BLOCK:     return SkTextureCompressionType::kBC1_RGB8_UNORM;
        case VK_FORMAT_BC1_RGBA_UNORM_BLOCK:    return SkTextureCompressionType::kBC1_RGBA8_UNORM;
        default:                                return SkTextureCompressionType::kNone;
    }
}

static constexpr bool VkFormatIsStencil(VkFormat format) {
    switch (format) {
        case VK_FORMAT_S8_UINT:
        case VK_FORMAT_D24_UNORM_S8_UINT:
        case VK_FORMAT_D32_SFLOAT_S8_UINT:
            return true;
        default:
            return false;
    }
}

static constexpr bool VkFormatIsDepth(VkFormat format) {
    switch (format) {
        case VK_FORMAT_D16_UNORM:
        case VK_FORMAT_D32_SFLOAT:
        case VK_FORMAT_D24_UNORM_S8_UINT:
        case VK_FORMAT_D32_SFLOAT_S8_UINT:
            return true;
        default:
            return false;
    }
}

static constexpr int VkFormatStencilBits(VkFormat format) {
    switch (format) {
        case VK_FORMAT_S8_UINT:
        case VK_FORMAT_D24_UNORM_S8_UINT:
        case VK_FORMAT_D32_SFLOAT_S8_UINT:
            return 8;
        default:
            return 0;
    }
}

static constexpr bool VkFormatNeedsYcbcrSampler(VkFormat format) {
    return format == VK_FORMAT_G8_B8R8_2PLANE_420_UNORM ||
           format == VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM ||
           format == VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16;
}

static constexpr bool SampleCountToVkSampleCount(uint32_t samples,
                                                 VkSampleCountFlagBits* vkSamples) {
    SkASSERT(samples >= 1);
    switch (samples) {
        case 1:
            *vkSamples = VK_SAMPLE_COUNT_1_BIT;
            return true;
        case 2:
            *vkSamples = VK_SAMPLE_COUNT_2_BIT;
            return true;
        case 4:
            *vkSamples = VK_SAMPLE_COUNT_4_BIT;
            return true;
        case 8:
            *vkSamples = VK_SAMPLE_COUNT_8_BIT;
            return true;
        case 16:
            *vkSamples = VK_SAMPLE_COUNT_16_BIT;
            return true;
        default:
            return false;
    }
}

/**
 * Returns true if the format is compressed.
 */
static constexpr bool VkFormatIsCompressed(VkFormat vkFormat) {
    switch (vkFormat) {
        case VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK:
        case VK_FORMAT_BC1_RGB_UNORM_BLOCK:
        case VK_FORMAT_BC1_RGBA_UNORM_BLOCK:
            return true;
        default:
            return false;
    }
    SkUNREACHABLE;
}

/**
 * Returns a pointer to the requested extension feature struct, or nullptr if it is not present.
 */
template <typename T>
T* GetExtensionFeatureStruct(const VkPhysicalDeviceFeatures2& features, VkStructureType type) {
    // All Vulkan structs that could be part of the features chain will start with the
    // structure type followed by the pNext pointer. We cast to the CommonVulkanHeader
    // so we can get access to the pNext for the next struct.
    struct CommonVulkanHeader {
        VkStructureType sType;
        void* pNext;
    };

    void* pNext = features.pNext;
    while (pNext) {
        CommonVulkanHeader* header = static_cast<CommonVulkanHeader*>(pNext);
        if (header->sType == type) {
            return static_cast<T*>(pNext);
        }
        pNext = header->pNext;
    }
    return nullptr;
}
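
/*
 * Example (a minimal usage sketch; `features2` is assumed to have been populated by
 * vkGetPhysicalDeviceFeatures2 with the Ycbcr feature struct linked into its pNext chain):
 *
 *   auto* ycbcrFeatures =
 *           GetExtensionFeatureStruct<VkPhysicalDeviceSamplerYcbcrConversionFeatures>(
 *                   features2,
 *                   VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES);
 *   if (ycbcrFeatures && ycbcrFeatures->samplerYcbcrConversion) {
 *       // Ycbcr conversion is available on this features chain.
 *   }
 */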

/**
 * Populates the given VkSamplerYcbcrConversionCreateInfo based on the supplied
 * VulkanYcbcrConversionInfo.
 */
void SetupSamplerYcbcrConversionInfo(VkSamplerYcbcrConversionCreateInfo* outInfo,
                                     const VulkanYcbcrConversionInfo& conversionInfo);

static constexpr const char* VkFormatToStr(VkFormat vkFormat) {
    switch (vkFormat) {
        case VK_FORMAT_R8G8B8A8_UNORM:           return "R8G8B8A8_UNORM";
        case VK_FORMAT_R8_UNORM:                 return "R8_UNORM";
        case VK_FORMAT_B8G8R8A8_UNORM:           return "B8G8R8A8_UNORM";
        case VK_FORMAT_R5G6B5_UNORM_PACK16:      return "R5G6B5_UNORM_PACK16";
        case VK_FORMAT_B5G6R5_UNORM_PACK16:      return "B5G6R5_UNORM_PACK16";
        case VK_FORMAT_R16G16B16A16_SFLOAT:      return "R16G16B16A16_SFLOAT";
        case VK_FORMAT_R16_SFLOAT:               return "R16_SFLOAT";
        case VK_FORMAT_R8G8B8_UNORM:             return "R8G8B8_UNORM";
        case VK_FORMAT_R8G8_UNORM:               return "R8G8_UNORM";
        case VK_FORMAT_A2B10G10R10_UNORM_PACK32: return "A2B10G10R10_UNORM_PACK32";
        case VK_FORMAT_A2R10G10B10_UNORM_PACK32: return "A2R10G10B10_UNORM_PACK32";
        case VK_FORMAT_B4G4R4A4_UNORM_PACK16:    return "B4G4R4A4_UNORM_PACK16";
        case VK_FORMAT_R4G4B4A4_UNORM_PACK16:    return "R4G4B4A4_UNORM_PACK16";
        case VK_FORMAT_R32G32B32A32_SFLOAT:      return "R32G32B32A32_SFLOAT";
        case VK_FORMAT_R8G8B8A8_SRGB:            return "R8G8B8A8_SRGB";
        case VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK:  return "ETC2_R8G8B8_UNORM_BLOCK";
        case VK_FORMAT_BC1_RGB_UNORM_BLOCK:      return "BC1_RGB_UNORM_BLOCK";
        case VK_FORMAT_BC1_RGBA_UNORM_BLOCK:     return "BC1_RGBA_UNORM_BLOCK";
        case VK_FORMAT_R16_UNORM:                return "R16_UNORM";
        case VK_FORMAT_R16G16_UNORM:             return "R16G16_UNORM";
        case VK_FORMAT_R16G16B16A16_UNORM:       return "R16G16B16A16_UNORM";
        case VK_FORMAT_R16G16_SFLOAT:            return "R16G16_SFLOAT";
        case VK_FORMAT_S8_UINT:                  return "S8_UINT";
        case VK_FORMAT_D16_UNORM:                return "D16_UNORM";
        case VK_FORMAT_D32_SFLOAT:               return "D32_SFLOAT";
        case VK_FORMAT_D24_UNORM_S8_UINT:        return "D24_UNORM_S8_UINT";
        case VK_FORMAT_D32_SFLOAT_S8_UINT:       return "D32_SFLOAT_S8_UINT";
        default:                                 return "Unknown";
    }
}

#ifdef SK_BUILD_FOR_ANDROID
/**
 * Vulkan AHardwareBuffer utility functions shared between Graphite and Ganesh.
 */
void GetYcbcrConversionInfoFromFormatProps(
        VulkanYcbcrConversionInfo* outConversionInfo,
        const VkAndroidHardwareBufferFormatPropertiesANDROID& formatProps);

bool GetAHardwareBufferProperties(
        VkAndroidHardwareBufferFormatPropertiesANDROID* outHwbFormatProps,
        VkAndroidHardwareBufferPropertiesANDROID* outHwbProps,
        const skgpu::VulkanInterface*,
        const AHardwareBuffer*,
        VkDevice);

bool AllocateAndBindImageMemory(skgpu::VulkanAlloc* outVulkanAlloc,
                                VkImage,
                                const VkPhysicalDeviceMemoryProperties2&,
                                const VkAndroidHardwareBufferPropertiesANDROID&,
                                AHardwareBuffer*,
                                const skgpu::VulkanInterface*,
                                VkDevice);
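
/*
 * A typical import flow (an illustrative sketch only; `interface`, `buffer`, `device`,
 * `image`, and `memoryProperties` are assumed to exist at the caller, and error handling
 * plus the VkImage creation itself are elided):
 *
 *   VkAndroidHardwareBufferFormatPropertiesANDROID hwbFormatProps;
 *   VkAndroidHardwareBufferPropertiesANDROID hwbProps;
 *   if (GetAHardwareBufferProperties(&hwbFormatProps, &hwbProps,
 *                                    interface, buffer, device)) {
 *       // ... create a VkImage compatible with hwbFormatProps ...
 *       skgpu::VulkanAlloc alloc;
 *       if (AllocateAndBindImageMemory(&alloc, image, memoryProperties,
 *                                      hwbProps, buffer, interface, device)) {
 *           // The VkImage is now backed by the AHardwareBuffer's memory.
 *       }
 *   }
 */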

#endif // SK_BUILD_FOR_ANDROID

/**
 * Calls faultProc with faultContext; passes debug info if VK_EXT_device_fault is supported/enabled.
 *
 * Note: must only be called *after* receiving VK_ERROR_DEVICE_LOST.
 */
void InvokeDeviceLostCallback(const skgpu::VulkanInterface* vulkanInterface,
                              VkDevice vkDevice,
                              skgpu::VulkanDeviceLostContext faultContext,
                              skgpu::VulkanDeviceLostProc faultProc,
                              bool supportsDeviceFaultInfoExtension);
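
/*
 * Illustrative call site (a sketch; `queue`, `submitInfo`, `fence`, `interface`, `device`,
 * and the client-supplied context/proc are assumed to exist at the caller):
 *
 *   VkResult result = vkQueueSubmit(queue, 1, &submitInfo, fence);
 *   if (result == VK_ERROR_DEVICE_LOST) {
 *       InvokeDeviceLostCallback(interface, device, deviceLostContext,
 *                                deviceLostProc, supportsDeviceFault);
 *   }
 */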

} // namespace skgpu

#endif // skgpu_VulkanUtilsPriv_DEFINED