/*
 * Copyright 2015 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "GrVkUtil.h"

#include "vk/GrVkGpu.h"
#include "SkSLCompiler.h"

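// Maps a GrPixelConfig to the VkFormat Ganesh uses to back it. Returns false for configs that
// have no Vulkan backing format. 'format' may be null when only the yes/no answer is needed.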
bool GrPixelConfigToVkFormat(GrPixelConfig config, VkFormat* format) {
    VkFormat dontCare;
    if (!format) {
        format = &dontCare;
    }

    switch (config) {
        case kUnknown_GrPixelConfig:
            return false;
        case kRGBA_8888_GrPixelConfig:
            *format = VK_FORMAT_R8G8B8A8_UNORM;
            return true;
        case kBGRA_8888_GrPixelConfig:
            *format = VK_FORMAT_B8G8R8A8_UNORM;
            return true;
        case kSRGBA_8888_GrPixelConfig:
            *format = VK_FORMAT_R8G8B8A8_SRGB;
            return true;
        case kSBGRA_8888_GrPixelConfig:
            *format = VK_FORMAT_B8G8R8A8_SRGB;
            return true;
        case kRGBA_1010102_GrPixelConfig:
            *format = VK_FORMAT_A2B10G10R10_UNORM_PACK32;
            return true;
        case kRGB_565_GrPixelConfig:
            *format = VK_FORMAT_R5G6B5_UNORM_PACK16;
            return true;
        case kRGBA_4444_GrPixelConfig:
            // R4G4B4A4 is not required to be supported, so we actually
            // store the data as if it were B4G4R4A4 and swizzle in shaders.
            *format = VK_FORMAT_B4G4R4A4_UNORM_PACK16;
            return true;
        case kAlpha_8_GrPixelConfig: // fall through
        case kAlpha_8_as_Red_GrPixelConfig:
            *format = VK_FORMAT_R8_UNORM;
            return true;
        case kAlpha_8_as_Alpha_GrPixelConfig:
            return false;
        case kGray_8_GrPixelConfig:
        case kGray_8_as_Red_GrPixelConfig:
            *format = VK_FORMAT_R8_UNORM;
            return true;
        case kGray_8_as_Lum_GrPixelConfig:
            return false;
        case kRGBA_float_GrPixelConfig:
            *format = VK_FORMAT_R32G32B32A32_SFLOAT;
            return true;
        case kRG_float_GrPixelConfig:
            *format = VK_FORMAT_R32G32_SFLOAT;
            return true;
        case kRGBA_half_GrPixelConfig:
            *format = VK_FORMAT_R16G16B16A16_SFLOAT;
            return true;
        case kAlpha_half_GrPixelConfig: // fall through
        case kAlpha_half_as_Red_GrPixelConfig:
            *format = VK_FORMAT_R16_SFLOAT;
            return true;
    }
    SK_ABORT("Unexpected config");
    return false;
}

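// Inverse of GrPixelConfigToVkFormat for the formats Ganesh knows how to map back; any other
// VkFormat yields kUnknown_GrPixelConfig. Note that the mapping is not one-to-one:
// VK_FORMAT_R8_UNORM, for example, always maps back to kAlpha_8_GrPixelConfig even though it
// also backs the gray-8 configs.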
GrPixelConfig GrVkFormatToPixelConfig(VkFormat format) {
    switch (format) {
        case VK_FORMAT_R8G8B8A8_UNORM:
            return kRGBA_8888_GrPixelConfig;
        case VK_FORMAT_B8G8R8A8_UNORM:
            return kBGRA_8888_GrPixelConfig;
        case VK_FORMAT_R8G8B8A8_SRGB:
            return kSRGBA_8888_GrPixelConfig;
        case VK_FORMAT_B8G8R8A8_SRGB:
            return kSBGRA_8888_GrPixelConfig;
        case VK_FORMAT_A2B10G10R10_UNORM_PACK32:
            return kRGBA_1010102_GrPixelConfig;
        case VK_FORMAT_R5G6B5_UNORM_PACK16:
            return kRGB_565_GrPixelConfig;
        case VK_FORMAT_B4G4R4A4_UNORM_PACK16:
            // R4G4B4A4 is not required to be supported, so we actually
            // store RGBA_4444 data as B4G4R4A4.
            return kRGBA_4444_GrPixelConfig;
        case VK_FORMAT_R8_UNORM:
            return kAlpha_8_GrPixelConfig;
        case VK_FORMAT_R32G32B32A32_SFLOAT:
            return kRGBA_float_GrPixelConfig;
        case VK_FORMAT_R32G32_SFLOAT:
            return kRG_float_GrPixelConfig;
        case VK_FORMAT_R16G16B16A16_SFLOAT:
            return kRGBA_half_GrPixelConfig;
        case VK_FORMAT_R16_SFLOAT:
            return kAlpha_half_GrPixelConfig;
        default:
            return kUnknown_GrPixelConfig;
    }
}

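// Checks whether 'config' is one of the GrPixelConfigs that may legitimately be backed by
// 'format', accounting for the aliasing above (e.g. several alpha/gray configs share
// VK_FORMAT_R8_UNORM).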
bool GrVkFormatPixelConfigPairIsValid(VkFormat format, GrPixelConfig config) {
    switch (format) {
        case VK_FORMAT_R8G8B8A8_UNORM:
            return kRGBA_8888_GrPixelConfig == config;
        case VK_FORMAT_B8G8R8A8_UNORM:
            return kBGRA_8888_GrPixelConfig == config;
        case VK_FORMAT_R8G8B8A8_SRGB:
            return kSRGBA_8888_GrPixelConfig == config;
        case VK_FORMAT_B8G8R8A8_SRGB:
            return kSBGRA_8888_GrPixelConfig == config;
        case VK_FORMAT_A2B10G10R10_UNORM_PACK32:
            return kRGBA_1010102_GrPixelConfig == config;
        case VK_FORMAT_R5G6B5_UNORM_PACK16:
            return kRGB_565_GrPixelConfig == config;
        case VK_FORMAT_B4G4R4A4_UNORM_PACK16:
            // R4G4B4A4 is not required to be supported, so we actually
            // store RGBA_4444 data as B4G4R4A4.
            return kRGBA_4444_GrPixelConfig == config;
        case VK_FORMAT_R8_UNORM:
            return kAlpha_8_GrPixelConfig == config ||
                   kAlpha_8_as_Red_GrPixelConfig == config ||
                   kGray_8_GrPixelConfig == config ||
                   kGray_8_as_Red_GrPixelConfig == config;
        case VK_FORMAT_R32G32B32A32_SFLOAT:
            return kRGBA_float_GrPixelConfig == config;
        case VK_FORMAT_R32G32_SFLOAT:
            return kRG_float_GrPixelConfig == config;
        case VK_FORMAT_R16G16B16A16_SFLOAT:
            return kRGBA_half_GrPixelConfig == config;
        case VK_FORMAT_R16_SFLOAT:
            return kAlpha_half_GrPixelConfig == config ||
                   kAlpha_half_as_Red_GrPixelConfig == config;
        default:
            return false;
    }
}

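// Reports whether 'format' is one of the VkFormats this Vulkan backend is prepared to use at all.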
bool GrVkFormatIsSupported(VkFormat format) {
    switch (format) {
        case VK_FORMAT_R8G8B8A8_UNORM:
        case VK_FORMAT_B8G8R8A8_UNORM:
        case VK_FORMAT_R8G8B8A8_SRGB:
        case VK_FORMAT_B8G8R8A8_SRGB:
        case VK_FORMAT_R8G8B8A8_SINT:
        case VK_FORMAT_A2B10G10R10_UNORM_PACK32:
        case VK_FORMAT_R5G6B5_UNORM_PACK16:
        case VK_FORMAT_B4G4R4A4_UNORM_PACK16:
        case VK_FORMAT_R8_UNORM:
        case VK_FORMAT_R32G32B32A32_SFLOAT:
        case VK_FORMAT_R32G32_SFLOAT:
        case VK_FORMAT_R16G16B16A16_SFLOAT:
        case VK_FORMAT_R16_SFLOAT:
            return true;
        default:
            return false;
    }
}

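// Returns true if 'format' is an sRGB-encoded format. When it is, and 'linearFormat' is non-null,
// *linearFormat receives the corresponding UNORM (linear) format; otherwise it receives 'format'
// unchanged.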
bool GrVkFormatIsSRGB(VkFormat format, VkFormat* linearFormat) {
    VkFormat linearFmt = format;
    switch (format) {
        case VK_FORMAT_R8_SRGB:
            linearFmt = VK_FORMAT_R8_UNORM;
            break;
        case VK_FORMAT_R8G8_SRGB:
            linearFmt = VK_FORMAT_R8G8_UNORM;
            break;
        case VK_FORMAT_R8G8B8_SRGB:
            linearFmt = VK_FORMAT_R8G8B8_UNORM;
            break;
        case VK_FORMAT_B8G8R8_SRGB:
            linearFmt = VK_FORMAT_B8G8R8_UNORM;
            break;
        case VK_FORMAT_R8G8B8A8_SRGB:
            linearFmt = VK_FORMAT_R8G8B8A8_UNORM;
            break;
        case VK_FORMAT_B8G8R8A8_SRGB:
            linearFmt = VK_FORMAT_B8G8R8A8_UNORM;
            break;
        case VK_FORMAT_A8B8G8R8_SRGB_PACK32:
            linearFmt = VK_FORMAT_A8B8G8R8_UNORM_PACK32;
            break;
        case VK_FORMAT_BC1_RGB_SRGB_BLOCK:
            linearFmt = VK_FORMAT_BC1_RGB_UNORM_BLOCK;
            break;
        case VK_FORMAT_BC1_RGBA_SRGB_BLOCK:
            linearFmt = VK_FORMAT_BC1_RGBA_UNORM_BLOCK;
            break;
        case VK_FORMAT_BC2_SRGB_BLOCK:
            linearFmt = VK_FORMAT_BC2_UNORM_BLOCK;
            break;
        case VK_FORMAT_BC3_SRGB_BLOCK:
            linearFmt = VK_FORMAT_BC3_UNORM_BLOCK;
            break;
        case VK_FORMAT_BC7_SRGB_BLOCK:
            linearFmt = VK_FORMAT_BC7_UNORM_BLOCK;
            break;
        case VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK:
            linearFmt = VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK;
            break;
        case VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK:
            linearFmt = VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK;
            break;
        case VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK:
            linearFmt = VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK;
            break;
        case VK_FORMAT_ASTC_4x4_SRGB_BLOCK:
            linearFmt = VK_FORMAT_ASTC_4x4_UNORM_BLOCK;
            break;
        case VK_FORMAT_ASTC_5x4_SRGB_BLOCK:
            linearFmt = VK_FORMAT_ASTC_5x4_UNORM_BLOCK;
            break;
        case VK_FORMAT_ASTC_5x5_SRGB_BLOCK:
            linearFmt = VK_FORMAT_ASTC_5x5_UNORM_BLOCK;
            break;
        case VK_FORMAT_ASTC_6x5_SRGB_BLOCK:
            linearFmt = VK_FORMAT_ASTC_6x5_UNORM_BLOCK;
            break;
        case VK_FORMAT_ASTC_6x6_SRGB_BLOCK:
            linearFmt = VK_FORMAT_ASTC_6x6_UNORM_BLOCK;
            break;
        case VK_FORMAT_ASTC_8x5_SRGB_BLOCK:
            linearFmt = VK_FORMAT_ASTC_8x5_UNORM_BLOCK;
            break;
        case VK_FORMAT_ASTC_8x6_SRGB_BLOCK:
            linearFmt = VK_FORMAT_ASTC_8x6_UNORM_BLOCK;
            break;
        case VK_FORMAT_ASTC_8x8_SRGB_BLOCK:
            linearFmt = VK_FORMAT_ASTC_8x8_UNORM_BLOCK;
            break;
        case VK_FORMAT_ASTC_10x5_SRGB_BLOCK:
            linearFmt = VK_FORMAT_ASTC_10x5_UNORM_BLOCK;
            break;
        case VK_FORMAT_ASTC_10x6_SRGB_BLOCK:
            linearFmt = VK_FORMAT_ASTC_10x6_UNORM_BLOCK;
            break;
        case VK_FORMAT_ASTC_10x8_SRGB_BLOCK:
            linearFmt = VK_FORMAT_ASTC_10x8_UNORM_BLOCK;
            break;
        case VK_FORMAT_ASTC_10x10_SRGB_BLOCK:
            linearFmt = VK_FORMAT_ASTC_10x10_UNORM_BLOCK;
            break;
        case VK_FORMAT_ASTC_12x10_SRGB_BLOCK:
            linearFmt = VK_FORMAT_ASTC_12x10_UNORM_BLOCK;
            break;
        case VK_FORMAT_ASTC_12x12_SRGB_BLOCK:
            linearFmt = VK_FORMAT_ASTC_12x12_UNORM_BLOCK;
            break;
        default:
            break;
    }
    if (linearFormat) {
        *linearFormat = linearFmt;
    }
    return (linearFmt != format);
}

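// Translates a raw MSAA sample count into the matching VkSampleCountFlagBits value. Returns false
// for counts with no Vulkan equivalent (anything other than a power of two from 1 to 64).
// A minimal usage sketch (the requestedSampleCnt variable here is hypothetical):
//     VkSampleCountFlagBits vkSamples;
//     if (!GrSampleCountToVkSampleCount(requestedSampleCnt, &vkSamples)) {
//         return false;  // unsupported sample count
//     }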
bool GrSampleCountToVkSampleCount(uint32_t samples, VkSampleCountFlagBits* vkSamples) {
    SkASSERT(samples >= 1);
    switch (samples) {
        case 1:
            *vkSamples = VK_SAMPLE_COUNT_1_BIT;
            return true;
        case 2:
            *vkSamples = VK_SAMPLE_COUNT_2_BIT;
            return true;
        case 4:
            *vkSamples = VK_SAMPLE_COUNT_4_BIT;
            return true;
        case 8:
            *vkSamples = VK_SAMPLE_COUNT_8_BIT;
            return true;
        case 16:
            *vkSamples = VK_SAMPLE_COUNT_16_BIT;
            return true;
        case 32:
            *vkSamples = VK_SAMPLE_COUNT_32_BIT;
            return true;
        case 64:
            *vkSamples = VK_SAMPLE_COUNT_64_BIT;
            return true;
        default:
            return false;
    }
}

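// Converts a Vulkan shader stage bit to the corresponding SkSL program kind. Only the vertex,
// geometry, and fragment stages are expected here; anything else trips the assert and falls back
// to fragment.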
SkSL::Program::Kind vk_shader_stage_to_skiasl_kind(VkShaderStageFlagBits stage) {
    if (VK_SHADER_STAGE_VERTEX_BIT == stage) {
        return SkSL::Program::kVertex_Kind;
    }
    if (VK_SHADER_STAGE_GEOMETRY_BIT == stage) {
        return SkSL::Program::kGeometry_Kind;
    }
    SkASSERT(VK_SHADER_STAGE_FRAGMENT_BIT == stage);
    return SkSL::Program::kFragment_Kind;
}

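// Inverse mapping: converts an SkSL program kind back to the Vulkan shader stage bit.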
VkShaderStageFlagBits skiasl_kind_to_vk_shader_stage(SkSL::Program::Kind kind) {
    if (SkSL::Program::kVertex_Kind == kind) {
        return VK_SHADER_STAGE_VERTEX_BIT;
    }
    if (SkSL::Program::kGeometry_Kind == kind) {
        return VK_SHADER_STAGE_GEOMETRY_BIT;
    }
    SkASSERT(SkSL::Program::kFragment_Kind == kind);
    return VK_SHADER_STAGE_FRAGMENT_BIT;
}

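// Compiles SkSL source for the given stage to SPIR-V, creates a VkShaderModule from it, and fills
// out 'stageInfo' so the module can be plugged into a pipeline. Returns false if SkSL compilation
// or shader module creation fails.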
bool GrCompileVkShaderModule(const GrVkGpu* gpu,
                             const char* shaderString,
                             VkShaderStageFlagBits stage,
                             VkShaderModule* shaderModule,
                             VkPipelineShaderStageCreateInfo* stageInfo,
                             const SkSL::Program::Settings& settings,
                             SkSL::Program::Inputs* outInputs) {
    std::unique_ptr<SkSL::Program> program = gpu->shaderCompiler()->convertProgram(
                                                              vk_shader_stage_to_skiasl_kind(stage),
                                                              SkSL::String(shaderString),
                                                              settings);
    if (!program) {
        SkDebugf("SkSL error:\n%s\n", gpu->shaderCompiler()->errorText().c_str());
        SkASSERT(false);
        // Bail out rather than dereference a null program in release builds.
        return false;
    }
    *outInputs = program->fInputs;
    SkSL::String code;
    if (!gpu->shaderCompiler()->toSPIRV(*program, &code)) {
        SkDebugf("%s\n", gpu->shaderCompiler()->errorText().c_str());
        return false;
    }

    VkShaderModuleCreateInfo moduleCreateInfo;
    memset(&moduleCreateInfo, 0, sizeof(VkShaderModuleCreateInfo));
    moduleCreateInfo.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
    moduleCreateInfo.pNext = nullptr;
    moduleCreateInfo.flags = 0;
    moduleCreateInfo.codeSize = code.size();
    moduleCreateInfo.pCode = (const uint32_t*)code.c_str();

    VkResult err = GR_VK_CALL(gpu->vkInterface(), CreateShaderModule(gpu->device(),
                                                                     &moduleCreateInfo,
                                                                     nullptr,
                                                                     shaderModule));
    if (err) {
        return false;
    }

    memset(stageInfo, 0, sizeof(VkPipelineShaderStageCreateInfo));
    stageInfo->sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
    stageInfo->pNext = nullptr;
    stageInfo->flags = 0;
    stageInfo->stage = skiasl_kind_to_vk_shader_stage(program->fKind);
    stageInfo->module = *shaderModule;
    stageInfo->pName = "main";
    stageInfo->pSpecializationInfo = nullptr;

    return true;
}