/*
 * Copyright 2022 Google LLC
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef skgpu_VulkanUtilsPriv_DEFINED
#define skgpu_VulkanUtilsPriv_DEFINED

#include "include/core/SkColor.h"
#include "include/core/SkRefCnt.h"
#include "include/core/SkTextureCompressionType.h"
#include "include/gpu/vk/VulkanTypes.h"
#include "include/private/base/SkAssert.h"
#include "include/private/gpu/vk/SkiaVulkan.h"
#include "src/gpu/SkSLToBackend.h"
#include "src/sksl/codegen/SkSLSPIRVCodeGenerator.h"

#ifdef SK_BUILD_FOR_ANDROID
#include <android/hardware_buffer.h>
#endif

#include <cstddef>
#include <cstdint>
#include <string>

namespace SkSL {

enum class ProgramKind : int8_t;
struct ProgramInterface;
struct ProgramSettings;
struct ShaderCaps;

}  // namespace SkSL

namespace skgpu {

class ShaderErrorHandler;
struct VulkanInterface;
struct VulkanBackendContext;
class VulkanExtensions;

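/**
 * Compiles SkSL into SPIR-V by forwarding to SkSLToBackend with the SPIR-V code generator.
 * Returns true on success; compilation errors are reported through errorHandler.
 */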
inline bool SkSLToSPIRV(const SkSL::ShaderCaps* caps,
                        const std::string& sksl,
                        SkSL::ProgramKind programKind,
                        const SkSL::ProgramSettings& settings,
                        std::string* spirv,
                        SkSL::ProgramInterface* outInterface,
                        ShaderErrorHandler* errorHandler) {
    return SkSLToBackend(caps, &SkSL::ToSPIRV, /*backendLabel=*/nullptr,
                         sksl, programKind, settings, spirv, outInterface, errorHandler);
}

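/**
 * Returns the SkColorChannelFlag bits for the color channels present in vkFormat, or 0 for
 * depth/stencil and unrecognized formats.
 */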
static constexpr uint32_t VkFormatChannels(VkFormat vkFormat) {
    switch (vkFormat) {
        case VK_FORMAT_R8G8B8A8_UNORM:           return kRGBA_SkColorChannelFlags;
        case VK_FORMAT_R8_UNORM:                 return kRed_SkColorChannelFlag;
        case VK_FORMAT_B8G8R8A8_UNORM:           return kRGBA_SkColorChannelFlags;
        case VK_FORMAT_R5G6B5_UNORM_PACK16:      return kRGB_SkColorChannelFlags;
        case VK_FORMAT_B5G6R5_UNORM_PACK16:      return kRGB_SkColorChannelFlags;
        case VK_FORMAT_R16G16B16A16_SFLOAT:      return kRGBA_SkColorChannelFlags;
        case VK_FORMAT_R16_SFLOAT:               return kRed_SkColorChannelFlag;
        case VK_FORMAT_R8G8B8_UNORM:             return kRGB_SkColorChannelFlags;
        case VK_FORMAT_R8G8_UNORM:               return kRG_SkColorChannelFlags;
        case VK_FORMAT_A2B10G10R10_UNORM_PACK32: return kRGBA_SkColorChannelFlags;
        case VK_FORMAT_A2R10G10B10_UNORM_PACK32: return kRGBA_SkColorChannelFlags;
        case VK_FORMAT_B4G4R4A4_UNORM_PACK16:    return kRGBA_SkColorChannelFlags;
        case VK_FORMAT_R4G4B4A4_UNORM_PACK16:    return kRGBA_SkColorChannelFlags;
        case VK_FORMAT_R8G8B8A8_SRGB:            return kRGBA_SkColorChannelFlags;
        case VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK:  return kRGB_SkColorChannelFlags;
        case VK_FORMAT_BC1_RGB_UNORM_BLOCK:      return kRGB_SkColorChannelFlags;
        case VK_FORMAT_BC1_RGBA_UNORM_BLOCK:     return kRGBA_SkColorChannelFlags;
        case VK_FORMAT_ASTC_4x4_UNORM_BLOCK:     return kRGBA_SkColorChannelFlags;
        case VK_FORMAT_ASTC_6x6_UNORM_BLOCK:     return kRGBA_SkColorChannelFlags;
        case VK_FORMAT_ASTC_8x8_UNORM_BLOCK:     return kRGBA_SkColorChannelFlags;
        case VK_FORMAT_R16_UNORM:                return kRed_SkColorChannelFlag;
        case VK_FORMAT_R16G16_UNORM:             return kRG_SkColorChannelFlags;
        case VK_FORMAT_R16G16B16A16_UNORM:       return kRGBA_SkColorChannelFlags;
        case VK_FORMAT_R16G16_SFLOAT:            return kRG_SkColorChannelFlags;
        case VK_FORMAT_S8_UINT:                  return 0;
        case VK_FORMAT_D16_UNORM:                return 0;
        case VK_FORMAT_D32_SFLOAT:               return 0;
        case VK_FORMAT_D24_UNORM_S8_UINT:        return 0;
        case VK_FORMAT_D32_SFLOAT_S8_UINT:       return 0;
        default:                                 return 0;
    }
}

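/**
 * Returns the size in bytes of one pixel (for uncompressed formats) or one block (for
 * compressed formats) of vkFormat, or 0 for unrecognized formats.
 */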
static constexpr size_t VkFormatBytesPerBlock(VkFormat vkFormat) {
    switch (vkFormat) {
        case VK_FORMAT_R8G8B8A8_UNORM:            return 4;
        case VK_FORMAT_R8_UNORM:                  return 1;
        case VK_FORMAT_B8G8R8A8_UNORM:            return 4;
        case VK_FORMAT_R5G6B5_UNORM_PACK16:       return 2;
        case VK_FORMAT_B5G6R5_UNORM_PACK16:       return 2;
        case VK_FORMAT_R16G16B16A16_SFLOAT:       return 8;
        case VK_FORMAT_R16_SFLOAT:                return 2;
        case VK_FORMAT_R8G8B8_UNORM:              return 3;
        case VK_FORMAT_R8G8_UNORM:                return 2;
        case VK_FORMAT_A2B10G10R10_UNORM_PACK32:  return 4;
        case VK_FORMAT_A2R10G10B10_UNORM_PACK32:  return 4;
        case VK_FORMAT_B4G4R4A4_UNORM_PACK16:     return 2;
        case VK_FORMAT_R4G4B4A4_UNORM_PACK16:     return 2;
        case VK_FORMAT_R8G8B8A8_SRGB:             return 4;
        case VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK:   return 8;
        case VK_FORMAT_BC1_RGB_UNORM_BLOCK:       return 8;
        case VK_FORMAT_BC1_RGBA_UNORM_BLOCK:      return 8;
        case VK_FORMAT_ASTC_4x4_UNORM_BLOCK:      return 16; // ASTC block bytes
        case VK_FORMAT_ASTC_6x6_UNORM_BLOCK:      return 16; // ASTC block bytes
        case VK_FORMAT_ASTC_8x8_UNORM_BLOCK:      return 16; // ASTC block bytes
        case VK_FORMAT_R16_UNORM:                 return 2;
        case VK_FORMAT_R16G16_UNORM:              return 4;
        case VK_FORMAT_R16G16B16A16_UNORM:        return 8;
        case VK_FORMAT_R16G16_SFLOAT:             return 4;
        // We currently overestimate this value for use in GPU size calculations, even though
        // the actual size is probably smaller. Planar formats should instead be treated like
        // compressed textures, which go through their own dedicated query for calculating size.
        case VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM: return 3;
        case VK_FORMAT_G8_B8R8_2PLANE_420_UNORM:  return 3;
        case VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16: return 6;
        case VK_FORMAT_S8_UINT:                   return 1;
        case VK_FORMAT_D16_UNORM:                 return 2;
        case VK_FORMAT_D32_SFLOAT:                return 4;
        case VK_FORMAT_D24_UNORM_S8_UINT:         return 4;
        case VK_FORMAT_D32_SFLOAT_S8_UINT:        return 8;

        default:                                  return 0;
    }
}

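/**
 * Maps a compressed VkFormat to the equivalent SkTextureCompressionType, or kNone if the
 * format is not a supported compressed format.
 */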
static constexpr SkTextureCompressionType VkFormatToCompressionType(VkFormat vkFormat) {
    switch (vkFormat) {
        case VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK: return SkTextureCompressionType::kETC2_RGB8_UNORM;
        case VK_FORMAT_BC1_RGB_UNORM_BLOCK:     return SkTextureCompressionType::kBC1_RGB8_UNORM;
        case VK_FORMAT_BC1_RGBA_UNORM_BLOCK:    return SkTextureCompressionType::kBC1_RGBA8_UNORM;
        case VK_FORMAT_ASTC_4x4_UNORM_BLOCK:    return SkTextureCompressionType::kASTC_RGBA8_4x4;
        case VK_FORMAT_ASTC_6x6_UNORM_BLOCK:    return SkTextureCompressionType::kASTC_RGBA8_6x6;
        case VK_FORMAT_ASTC_8x8_UNORM_BLOCK:    return SkTextureCompressionType::kASTC_RGBA8_8x8;
        default:                                return SkTextureCompressionType::kNone;
    }
}

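/**
 * Returns true if format contains a stencil component.
 */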
static constexpr bool VkFormatIsStencil(VkFormat format) {
    switch (format) {
        case VK_FORMAT_S8_UINT:
        case VK_FORMAT_D24_UNORM_S8_UINT:
        case VK_FORMAT_D32_SFLOAT_S8_UINT:
            return true;
        default:
            return false;
    }
}

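/**
 * Returns true if format contains a depth component.
 */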
static constexpr bool VkFormatIsDepth(VkFormat format) {
    switch (format) {
        case VK_FORMAT_D16_UNORM:
        case VK_FORMAT_D32_SFLOAT:
        case VK_FORMAT_D24_UNORM_S8_UINT:
        case VK_FORMAT_D32_SFLOAT_S8_UINT:
            return true;
        default:
            return false;
    }
}

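/**
 * Returns the number of stencil bits in format, or 0 if it has no stencil component.
 */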
static constexpr int VkFormatStencilBits(VkFormat format) {
    switch (format) {
        case VK_FORMAT_S8_UINT:
        case VK_FORMAT_D24_UNORM_S8_UINT:
        case VK_FORMAT_D32_SFLOAT_S8_UINT:
            return 8;
        default:
            return 0;
    }
}

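/**
 * Returns true if format is a multi-planar YCbCr format, which can only be sampled through a
 * VkSamplerYcbcrConversion.
 */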
static constexpr bool VkFormatNeedsYcbcrSampler(VkFormat format) {
    return format == VK_FORMAT_G8_B8R8_2PLANE_420_UNORM ||
           format == VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM ||
           format == VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16;
}

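/**
 * Converts a sample count to the equivalent VkSampleCountFlagBits and stores it in vkSamples.
 * Returns false if the count has no Vulkan equivalent.
 */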
static constexpr bool SampleCountToVkSampleCount(uint32_t samples,
                                                 VkSampleCountFlagBits* vkSamples) {
    SkASSERT(samples >= 1);
    switch (samples) {
        case 1:
            *vkSamples = VK_SAMPLE_COUNT_1_BIT;
            return true;
        case 2:
            *vkSamples = VK_SAMPLE_COUNT_2_BIT;
            return true;
        case 4:
            *vkSamples = VK_SAMPLE_COUNT_4_BIT;
            return true;
        case 8:
            *vkSamples = VK_SAMPLE_COUNT_8_BIT;
            return true;
        case 16:
            *vkSamples = VK_SAMPLE_COUNT_16_BIT;
            return true;
        default:
            return false;
    }
}

/**
 * Returns true if the format is compressed.
 */
static constexpr bool VkFormatIsCompressed(VkFormat vkFormat) {
    switch (vkFormat) {
        case VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK:
        case VK_FORMAT_BC1_RGB_UNORM_BLOCK:
        case VK_FORMAT_BC1_RGBA_UNORM_BLOCK:
        case VK_FORMAT_ASTC_4x4_UNORM_BLOCK:
        case VK_FORMAT_ASTC_6x6_UNORM_BLOCK:
        case VK_FORMAT_ASTC_8x8_UNORM_BLOCK:
            return true;
        default:
            return false;
    }
    SkUNREACHABLE;
}

/**
 * Returns a pointer to the requested extension feature struct, or nullptr if it is not present.
 */
template <typename T>
T* GetExtensionFeatureStruct(const VkPhysicalDeviceFeatures2& features, VkStructureType type) {
    // All Vulkan structs that could be part of the features chain begin with the structure
    // type followed by the pNext pointer. We cast to CommonVulkanHeader so we can access
    // pNext and walk to the next struct in the chain.
    struct CommonVulkanHeader {
        VkStructureType sType;
        void*           pNext;
    };

    void* pNext = features.pNext;
    while (pNext) {
        CommonVulkanHeader* header = static_cast<CommonVulkanHeader*>(pNext);
        if (header->sType == type) {
            return static_cast<T*>(pNext);
        }
        pNext = header->pNext;
    }
    return nullptr;
}

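// As an illustrative sketch (not part of this header's API): given a VkPhysicalDeviceFeatures2
// whose pNext chain was built up before calling vkGetPhysicalDeviceFeatures2, the YCbCr
// conversion feature struct could be located like so:
//
//     auto* ycbcrFeatures =
//             GetExtensionFeatureStruct<VkPhysicalDeviceSamplerYcbcrConversionFeatures>(
//                     features,
//                     VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES);
//     if (ycbcrFeatures && ycbcrFeatures->samplerYcbcrConversion) {
//         // Sampler YCbCr conversion is supported.
//     }
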
/**
 * Populates the given VkSamplerYcbcrConversionCreateInfo based on a VulkanYcbcrConversionInfo.
 */
void SetupSamplerYcbcrConversionInfo(VkSamplerYcbcrConversionCreateInfo* outInfo,
                                     const VulkanYcbcrConversionInfo& conversionInfo);

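/**
 * Returns a human-readable name for vkFormat, or "Unknown" for unrecognized formats.
 */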
static constexpr const char* VkFormatToStr(VkFormat vkFormat) {
    switch (vkFormat) {
        case VK_FORMAT_R8G8B8A8_UNORM:           return "R8G8B8A8_UNORM";
        case VK_FORMAT_R8_UNORM:                 return "R8_UNORM";
        case VK_FORMAT_B8G8R8A8_UNORM:           return "B8G8R8A8_UNORM";
        case VK_FORMAT_R5G6B5_UNORM_PACK16:      return "R5G6B5_UNORM_PACK16";
        case VK_FORMAT_B5G6R5_UNORM_PACK16:      return "B5G6R5_UNORM_PACK16";
        case VK_FORMAT_R16G16B16A16_SFLOAT:      return "R16G16B16A16_SFLOAT";
        case VK_FORMAT_R16_SFLOAT:               return "R16_SFLOAT";
        case VK_FORMAT_R8G8B8_UNORM:             return "R8G8B8_UNORM";
        case VK_FORMAT_R8G8_UNORM:               return "R8G8_UNORM";
        case VK_FORMAT_A2B10G10R10_UNORM_PACK32: return "A2B10G10R10_UNORM_PACK32";
        case VK_FORMAT_A2R10G10B10_UNORM_PACK32: return "A2R10G10B10_UNORM_PACK32";
        case VK_FORMAT_B4G4R4A4_UNORM_PACK16:    return "B4G4R4A4_UNORM_PACK16";
        case VK_FORMAT_R4G4B4A4_UNORM_PACK16:    return "R4G4B4A4_UNORM_PACK16";
        case VK_FORMAT_R32G32B32A32_SFLOAT:      return "R32G32B32A32_SFLOAT";
        case VK_FORMAT_R8G8B8A8_SRGB:            return "R8G8B8A8_SRGB";
        case VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK:  return "ETC2_R8G8B8_UNORM_BLOCK";
        case VK_FORMAT_BC1_RGB_UNORM_BLOCK:      return "BC1_RGB_UNORM_BLOCK";
        case VK_FORMAT_BC1_RGBA_UNORM_BLOCK:     return "BC1_RGBA_UNORM_BLOCK";
        case VK_FORMAT_ASTC_4x4_UNORM_BLOCK:     return "ASTC_4x4_UNORM_BLOCK";
        case VK_FORMAT_ASTC_6x6_UNORM_BLOCK:     return "ASTC_6x6_UNORM_BLOCK";
        case VK_FORMAT_ASTC_8x8_UNORM_BLOCK:     return "ASTC_8x8_UNORM_BLOCK";
        case VK_FORMAT_R16_UNORM:                return "R16_UNORM";
        case VK_FORMAT_R16G16_UNORM:             return "R16G16_UNORM";
        case VK_FORMAT_R16G16B16A16_UNORM:       return "R16G16B16A16_UNORM";
        case VK_FORMAT_R16G16_SFLOAT:            return "R16G16_SFLOAT";
        case VK_FORMAT_S8_UINT:                  return "S8_UINT";
        case VK_FORMAT_D16_UNORM:                return "D16_UNORM";
        case VK_FORMAT_D32_SFLOAT:               return "D32_SFLOAT";
        case VK_FORMAT_D24_UNORM_S8_UINT:        return "D24_UNORM_S8_UINT";
        case VK_FORMAT_D32_SFLOAT_S8_UINT:       return "D32_SFLOAT_S8_UINT";

        default:                                 return "Unknown";
    }
}

#ifdef SK_BUILD_FOR_ANDROID
/**
 * Vulkan AHardwareBuffer utility functions shared between Graphite and Ganesh.
 */
void GetYcbcrConversionInfoFromFormatProps(
        VulkanYcbcrConversionInfo* outConversionInfo,
        const VkAndroidHardwareBufferFormatPropertiesANDROID& formatProps);

bool GetAHardwareBufferProperties(
        VkAndroidHardwareBufferFormatPropertiesANDROID* outHwbFormatProps,
        VkAndroidHardwareBufferPropertiesANDROID* outHwbProps,
        const skgpu::VulkanInterface*,
        const AHardwareBuffer*,
        VkDevice);

bool AllocateAndBindImageMemory(skgpu::VulkanAlloc* outVulkanAlloc,
                                VkImage,
                                const VkPhysicalDeviceMemoryProperties2&,
                                const VkAndroidHardwareBufferPropertiesANDROID&,
                                AHardwareBuffer*,
                                const skgpu::VulkanInterface*,
                                VkDevice);

#endif // SK_BUILD_FOR_ANDROID

/**
 * Calls faultProc with faultContext; passes debug info if VK_EXT_device_fault is supported/enabled.
 *
 * Note: must only be called *after* receiving VK_ERROR_DEVICE_LOST.
 */
void InvokeDeviceLostCallback(const skgpu::VulkanInterface* vulkanInterface,
                              VkDevice vkDevice,
                              skgpu::VulkanDeviceLostContext faultContext,
                              skgpu::VulkanDeviceLostProc faultProc,
                              bool supportsDeviceFaultInfoExtension);

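/**
 * Constructs a VulkanInterface from the given VulkanBackendContext, using extOverride in place
 * of the context's extensions when it is non-null; the physical-device and instance Vulkan
 * versions are reported through the out parameters.
 */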
sk_sp<skgpu::VulkanInterface> MakeInterface(const skgpu::VulkanBackendContext&,
                                            const skgpu::VulkanExtensions* extOverride,
                                            uint32_t* physDevVersionOut,
                                            uint32_t* instanceVersionOut);

}  // namespace skgpu

#endif // skgpu_VulkanUtilsPriv_DEFINED