/*
 * Copyright 2015 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "GrVkUtil.h"

#include "vk/GrVkGpu.h"
#include "SkSLCompiler.h"

GrPixelConfigToVkFormat(GrPixelConfig config,VkFormat * format)13 bool GrPixelConfigToVkFormat(GrPixelConfig config, VkFormat* format) {
14 VkFormat dontCare;
15 if (!format) {
16 format = &dontCare;
17 }
18
19 switch (config) {
20 case kUnknown_GrPixelConfig:
21 return false;
22 case kRGBA_8888_GrPixelConfig:
23 *format = VK_FORMAT_R8G8B8A8_UNORM;
24 return true;
25 case kBGRA_8888_GrPixelConfig:
26 *format = VK_FORMAT_B8G8R8A8_UNORM;
27 return true;
28 case kSRGBA_8888_GrPixelConfig:
29 *format = VK_FORMAT_R8G8B8A8_SRGB;
30 return true;
31 case kSBGRA_8888_GrPixelConfig:
32 *format = VK_FORMAT_B8G8R8A8_SRGB;
33 return true;
34 case kRGBA_8888_sint_GrPixelConfig:
35 *format = VK_FORMAT_R8G8B8A8_SINT;
36 return true;
37 case kRGB_565_GrPixelConfig:
38 *format = VK_FORMAT_R5G6B5_UNORM_PACK16;
39 return true;
40 case kRGBA_4444_GrPixelConfig:
41 // R4G4B4A4 is not required to be supported so we actually
42 // store the data is if it was B4G4R4A4 and swizzle in shaders
43 *format = VK_FORMAT_B4G4R4A4_UNORM_PACK16;
44 return true;
45 case kAlpha_8_GrPixelConfig: // fall through
46 case kAlpha_8_as_Red_GrPixelConfig:
47 *format = VK_FORMAT_R8_UNORM;
48 return true;
49 case kAlpha_8_as_Alpha_GrPixelConfig:
50 return false;
51 case kGray_8_GrPixelConfig:
52 case kGray_8_as_Red_GrPixelConfig:
53 *format = VK_FORMAT_R8_UNORM;
54 return true;
55 case kGray_8_as_Lum_GrPixelConfig:
56 return false;
57 case kRGBA_float_GrPixelConfig:
58 *format = VK_FORMAT_R32G32B32A32_SFLOAT;
59 return true;
60 case kRG_float_GrPixelConfig:
61 *format = VK_FORMAT_R32G32_SFLOAT;
62 return true;
63 case kRGBA_half_GrPixelConfig:
64 *format = VK_FORMAT_R16G16B16A16_SFLOAT;
65 return true;
66 case kAlpha_half_GrPixelConfig: // fall through
67 case kAlpha_half_as_Red_GrPixelConfig:
68 *format = VK_FORMAT_R16_SFLOAT;
69 return true;
70 }
71 SK_ABORT("Unexpected config");
72 return false;
73 }
74
GrVkFormatToPixelConfig(VkFormat format)75 GrPixelConfig GrVkFormatToPixelConfig(VkFormat format) {
76 switch (format) {
77 case VK_FORMAT_R8G8B8A8_UNORM:
78 return kRGBA_8888_GrPixelConfig;
79 case VK_FORMAT_B8G8R8A8_UNORM:
80 return kBGRA_8888_GrPixelConfig;
81 case VK_FORMAT_R8G8B8A8_SRGB:
82 return kSRGBA_8888_GrPixelConfig;
83 case VK_FORMAT_B8G8R8A8_SRGB:
84 return kSBGRA_8888_GrPixelConfig;
85 case VK_FORMAT_R8G8B8A8_SINT:
86 return kRGBA_8888_sint_GrPixelConfig;
87 case VK_FORMAT_R5G6B5_UNORM_PACK16:
88 return kRGB_565_GrPixelConfig;
89 break;
90 case VK_FORMAT_B4G4R4A4_UNORM_PACK16:
91 // R4G4B4A4 is not required to be supported so we actually
92 // store RGBA_4444 data as B4G4R4A4.
93 return kRGBA_4444_GrPixelConfig;
94 case VK_FORMAT_R8_UNORM:
95 return kAlpha_8_GrPixelConfig;
96 case VK_FORMAT_R32G32B32A32_SFLOAT:
97 return kRGBA_float_GrPixelConfig;
98 case VK_FORMAT_R32G32_SFLOAT:
99 return kRG_float_GrPixelConfig;
100 case VK_FORMAT_R16G16B16A16_SFLOAT:
101 return kRGBA_half_GrPixelConfig;
102 case VK_FORMAT_R16_SFLOAT:
103 return kAlpha_half_GrPixelConfig;
104 default:
105 return kUnknown_GrPixelConfig;
106 }
107 }
108
GrVkFormatPixelConfigPairIsValid(VkFormat format,GrPixelConfig config)109 bool GrVkFormatPixelConfigPairIsValid(VkFormat format, GrPixelConfig config) {
110 switch (format) {
111 case VK_FORMAT_R8G8B8A8_UNORM:
112 return kRGBA_8888_GrPixelConfig == config;
113 case VK_FORMAT_B8G8R8A8_UNORM:
114 return kBGRA_8888_GrPixelConfig == config;
115 case VK_FORMAT_R8G8B8A8_SRGB:
116 return kSRGBA_8888_GrPixelConfig == config;
117 case VK_FORMAT_B8G8R8A8_SRGB:
118 return kSBGRA_8888_GrPixelConfig == config;
119 case VK_FORMAT_R8G8B8A8_SINT:
120 return kRGBA_8888_sint_GrPixelConfig == config;
121 case VK_FORMAT_R5G6B5_UNORM_PACK16:
122 return kRGB_565_GrPixelConfig == config;
123 case VK_FORMAT_B4G4R4A4_UNORM_PACK16:
124 // R4G4B4A4 is not required to be supported so we actually
125 // store RGBA_4444 data as B4G4R4A4.
126 return kRGBA_4444_GrPixelConfig == config;
127 case VK_FORMAT_R8_UNORM:
128 return kAlpha_8_GrPixelConfig == config ||
129 kAlpha_8_as_Red_GrPixelConfig == config ||
130 kGray_8_GrPixelConfig == config ||
131 kGray_8_as_Red_GrPixelConfig == config;
132 case VK_FORMAT_R32G32B32A32_SFLOAT:
133 return kRGBA_float_GrPixelConfig == config;
134 case VK_FORMAT_R32G32_SFLOAT:
135 return kRG_float_GrPixelConfig == config;
136 case VK_FORMAT_R16G16B16A16_SFLOAT:
137 return kRGBA_half_GrPixelConfig == config;
138 case VK_FORMAT_R16_SFLOAT:
139 return kAlpha_half_GrPixelConfig == config ||
140 kAlpha_half_as_Red_GrPixelConfig == config;
141 default:
142 return false;
143 }
144 }
145
GrVkFormatIsSupported(VkFormat format)146 bool GrVkFormatIsSupported(VkFormat format) {
147 switch (format) {
148 case VK_FORMAT_R8G8B8A8_UNORM:
149 case VK_FORMAT_B8G8R8A8_UNORM:
150 case VK_FORMAT_R8G8B8A8_SRGB:
151 case VK_FORMAT_B8G8R8A8_SRGB:
152 case VK_FORMAT_R8G8B8A8_SINT:
153 case VK_FORMAT_R5G6B5_UNORM_PACK16:
154 case VK_FORMAT_B4G4R4A4_UNORM_PACK16:
155 case VK_FORMAT_R8_UNORM:
156 case VK_FORMAT_R32G32B32A32_SFLOAT:
157 case VK_FORMAT_R32G32_SFLOAT:
158 case VK_FORMAT_R16G16B16A16_SFLOAT:
159 case VK_FORMAT_R16_SFLOAT:
160 return true;
161 default:
162 return false;
163 }
164 }
165
GrVkFormatIsSRGB(VkFormat format,VkFormat * linearFormat)166 bool GrVkFormatIsSRGB(VkFormat format, VkFormat* linearFormat) {
167 VkFormat linearFmt = format;
168 switch (format) {
169 case VK_FORMAT_R8_SRGB:
170 linearFmt = VK_FORMAT_R8_UNORM;
171 break;
172 case VK_FORMAT_R8G8_SRGB:
173 linearFmt = VK_FORMAT_R8G8_UNORM;
174 break;
175 case VK_FORMAT_R8G8B8_SRGB:
176 linearFmt = VK_FORMAT_R8G8B8_UNORM;
177 break;
178 case VK_FORMAT_B8G8R8_SRGB:
179 linearFmt = VK_FORMAT_B8G8R8_UNORM;
180 break;
181 case VK_FORMAT_R8G8B8A8_SRGB:
182 linearFmt = VK_FORMAT_R8G8B8A8_UNORM;
183 break;
184 case VK_FORMAT_B8G8R8A8_SRGB:
185 linearFmt = VK_FORMAT_B8G8R8A8_UNORM;
186 break;
187 case VK_FORMAT_A8B8G8R8_SRGB_PACK32:
188 linearFmt = VK_FORMAT_A8B8G8R8_UNORM_PACK32;
189 break;
190 case VK_FORMAT_BC1_RGB_SRGB_BLOCK:
191 linearFmt = VK_FORMAT_BC1_RGB_UNORM_BLOCK;
192 break;
193 case VK_FORMAT_BC1_RGBA_SRGB_BLOCK:
194 linearFmt = VK_FORMAT_BC1_RGBA_UNORM_BLOCK;
195 break;
196 case VK_FORMAT_BC2_SRGB_BLOCK:
197 linearFmt = VK_FORMAT_BC2_UNORM_BLOCK;
198 break;
199 case VK_FORMAT_BC3_SRGB_BLOCK:
200 linearFmt = VK_FORMAT_BC3_UNORM_BLOCK;
201 break;
202 case VK_FORMAT_BC7_SRGB_BLOCK:
203 linearFmt = VK_FORMAT_BC7_UNORM_BLOCK;
204 break;
205 case VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK:
206 linearFmt = VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK;
207 break;
208 case VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK:
209 linearFmt = VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK;
210 break;
211 case VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK:
212 linearFmt = VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK;
213 break;
214 case VK_FORMAT_ASTC_4x4_SRGB_BLOCK:
215 linearFmt = VK_FORMAT_ASTC_4x4_UNORM_BLOCK;
216 break;
217 case VK_FORMAT_ASTC_5x4_SRGB_BLOCK:
218 linearFmt = VK_FORMAT_ASTC_5x4_UNORM_BLOCK;
219 break;
220 case VK_FORMAT_ASTC_5x5_SRGB_BLOCK:
221 linearFmt = VK_FORMAT_ASTC_5x5_UNORM_BLOCK;
222 break;
223 case VK_FORMAT_ASTC_6x5_SRGB_BLOCK:
224 linearFmt = VK_FORMAT_ASTC_6x5_UNORM_BLOCK;
225 break;
226 case VK_FORMAT_ASTC_6x6_SRGB_BLOCK:
227 linearFmt = VK_FORMAT_ASTC_6x6_UNORM_BLOCK;
228 break;
229 case VK_FORMAT_ASTC_8x5_SRGB_BLOCK:
230 linearFmt = VK_FORMAT_ASTC_8x5_UNORM_BLOCK;
231 break;
232 case VK_FORMAT_ASTC_8x6_SRGB_BLOCK:
233 linearFmt = VK_FORMAT_ASTC_8x6_UNORM_BLOCK;
234 break;
235 case VK_FORMAT_ASTC_8x8_SRGB_BLOCK:
236 linearFmt = VK_FORMAT_ASTC_8x8_UNORM_BLOCK;
237 break;
238 case VK_FORMAT_ASTC_10x5_SRGB_BLOCK:
239 linearFmt = VK_FORMAT_ASTC_10x5_UNORM_BLOCK;
240 break;
241 case VK_FORMAT_ASTC_10x6_SRGB_BLOCK:
242 linearFmt = VK_FORMAT_ASTC_10x6_UNORM_BLOCK;
243 break;
244 case VK_FORMAT_ASTC_10x8_SRGB_BLOCK:
245 linearFmt = VK_FORMAT_ASTC_10x8_UNORM_BLOCK;
246 break;
247 case VK_FORMAT_ASTC_10x10_SRGB_BLOCK:
248 linearFmt = VK_FORMAT_ASTC_10x10_UNORM_BLOCK;
249 break;
250 case VK_FORMAT_ASTC_12x10_SRGB_BLOCK:
251 linearFmt = VK_FORMAT_ASTC_12x10_UNORM_BLOCK;
252 break;
253 case VK_FORMAT_ASTC_12x12_SRGB_BLOCK:
254 linearFmt = VK_FORMAT_ASTC_12x12_UNORM_BLOCK;
255 break;
256 default:
257 break;
258 }
259 if (linearFormat) {
260 *linearFormat = linearFmt;
261 }
262 return (linearFmt != format);
263 }
264
GrSampleCountToVkSampleCount(uint32_t samples,VkSampleCountFlagBits * vkSamples)265 bool GrSampleCountToVkSampleCount(uint32_t samples, VkSampleCountFlagBits* vkSamples) {
266 SkASSERT(samples >= 1);
267 switch (samples) {
268 case 1:
269 *vkSamples = VK_SAMPLE_COUNT_1_BIT;
270 return true;
271 case 2:
272 *vkSamples = VK_SAMPLE_COUNT_2_BIT;
273 return true;
274 case 4:
275 *vkSamples = VK_SAMPLE_COUNT_4_BIT;
276 return true;
277 case 8:
278 *vkSamples = VK_SAMPLE_COUNT_8_BIT;
279 return true;
280 case 16:
281 *vkSamples = VK_SAMPLE_COUNT_16_BIT;
282 return true;
283 case 32:
284 *vkSamples = VK_SAMPLE_COUNT_32_BIT;
285 return true;
286 case 64:
287 *vkSamples = VK_SAMPLE_COUNT_64_BIT;
288 return true;
289 default:
290 return false;
291 }
292 }
293
vk_shader_stage_to_skiasl_kind(VkShaderStageFlagBits stage)294 SkSL::Program::Kind vk_shader_stage_to_skiasl_kind(VkShaderStageFlagBits stage) {
295 if (VK_SHADER_STAGE_VERTEX_BIT == stage) {
296 return SkSL::Program::kVertex_Kind;
297 }
298 if (VK_SHADER_STAGE_GEOMETRY_BIT == stage) {
299 return SkSL::Program::kGeometry_Kind;
300 }
301 SkASSERT(VK_SHADER_STAGE_FRAGMENT_BIT == stage);
302 return SkSL::Program::kFragment_Kind;
303 }
304
skiasl_kind_to_vk_shader_stage(SkSL::Program::Kind kind)305 VkShaderStageFlagBits skiasl_kind_to_vk_shader_stage(SkSL::Program::Kind kind) {
306 if (SkSL::Program::kVertex_Kind == kind) {
307 return VK_SHADER_STAGE_VERTEX_BIT;
308 }
309 if (SkSL::Program::kGeometry_Kind == kind) {
310 return VK_SHADER_STAGE_GEOMETRY_BIT;
311 }
312 SkASSERT(SkSL::Program::kFragment_Kind == kind);
313 return VK_SHADER_STAGE_FRAGMENT_BIT;
314 }
315
GrCompileVkShaderModule(const GrVkGpu * gpu,const char * shaderString,VkShaderStageFlagBits stage,VkShaderModule * shaderModule,VkPipelineShaderStageCreateInfo * stageInfo,const SkSL::Program::Settings & settings,SkSL::Program::Inputs * outInputs)316 bool GrCompileVkShaderModule(const GrVkGpu* gpu,
317 const char* shaderString,
318 VkShaderStageFlagBits stage,
319 VkShaderModule* shaderModule,
320 VkPipelineShaderStageCreateInfo* stageInfo,
321 const SkSL::Program::Settings& settings,
322 SkSL::Program::Inputs* outInputs) {
323 std::unique_ptr<SkSL::Program> program = gpu->shaderCompiler()->convertProgram(
324 vk_shader_stage_to_skiasl_kind(stage),
325 SkSL::String(shaderString),
326 settings);
327 if (!program) {
328 SkDebugf("SkSL error:\n%s\n", gpu->shaderCompiler()->errorText().c_str());
329 SkASSERT(false);
330 }
331 *outInputs = program->fInputs;
332 SkSL::String code;
333 if (!gpu->shaderCompiler()->toSPIRV(*program, &code)) {
334 SkDebugf("%s\n", gpu->shaderCompiler()->errorText().c_str());
335 return false;
336 }
337
338 VkShaderModuleCreateInfo moduleCreateInfo;
339 memset(&moduleCreateInfo, 0, sizeof(VkShaderModuleCreateInfo));
340 moduleCreateInfo.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
341 moduleCreateInfo.pNext = nullptr;
342 moduleCreateInfo.flags = 0;
343 moduleCreateInfo.codeSize = code.size();
344 moduleCreateInfo.pCode = (const uint32_t*)code.c_str();
345
346 VkResult err = GR_VK_CALL(gpu->vkInterface(), CreateShaderModule(gpu->device(),
347 &moduleCreateInfo,
348 nullptr,
349 shaderModule));
350 if (err) {
351 return false;
352 }
353
354 memset(stageInfo, 0, sizeof(VkPipelineShaderStageCreateInfo));
355 stageInfo->sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
356 stageInfo->pNext = nullptr;
357 stageInfo->flags = 0;
358 stageInfo->stage = skiasl_kind_to_vk_shader_stage(program->fKind);
359 stageInfo->module = *shaderModule;
360 stageInfo->pName = "main";
361 stageInfo->pSpecializationInfo = nullptr;
362
363 return true;
364 }
365