/*
 * Copyright 2015 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "src/gpu/vk/GrVkUtil.h"

#include "src/gpu/GrContextPriv.h"
#include "src/gpu/GrDataUtils.h"
#include "src/gpu/vk/GrVkGpu.h"
#include "src/sksl/SkSLCompiler.h"

#ifdef SK_DEBUG
GrVkFormatColorTypePairIsValid(VkFormat format,GrColorType colorType)16 bool GrVkFormatColorTypePairIsValid(VkFormat format, GrColorType colorType) {
17 switch (format) {
18 case VK_FORMAT_R8G8B8A8_UNORM: return GrColorType::kRGBA_8888 == colorType ||
19 GrColorType::kRGB_888x == colorType;
20 case VK_FORMAT_B8G8R8A8_UNORM: return GrColorType::kBGRA_8888 == colorType;
21 case VK_FORMAT_R8G8B8A8_SRGB: return GrColorType::kRGBA_8888_SRGB == colorType;
22 case VK_FORMAT_R8G8B8_UNORM: return GrColorType::kRGB_888x == colorType;
23 case VK_FORMAT_R8G8_UNORM: return GrColorType::kRG_88 == colorType;
24 case VK_FORMAT_A2B10G10R10_UNORM_PACK32: return GrColorType::kRGBA_1010102 == colorType;
25 case VK_FORMAT_R5G6B5_UNORM_PACK16: return GrColorType::kBGR_565 == colorType;
26 // R4G4B4A4 is not required to be supported so we actually
27 // store RGBA_4444 data as B4G4R4A4.
28 case VK_FORMAT_B4G4R4A4_UNORM_PACK16: return GrColorType::kABGR_4444 == colorType;
29 case VK_FORMAT_R4G4B4A4_UNORM_PACK16: return GrColorType::kABGR_4444 == colorType;
30 case VK_FORMAT_R8_UNORM: return GrColorType::kAlpha_8 == colorType ||
31 GrColorType::kGray_8 == colorType;
32 case VK_FORMAT_R32G32B32A32_SFLOAT: return GrColorType::kRGBA_F32 == colorType;
33 case VK_FORMAT_R16G16B16A16_SFLOAT: return GrColorType::kRGBA_F16 == colorType ||
34 GrColorType::kRGBA_F16_Clamped == colorType;
35 case VK_FORMAT_R16_SFLOAT: return GrColorType::kAlpha_F16 == colorType;
36 case VK_FORMAT_R16_UNORM: return GrColorType::kR_16 == colorType;
37 case VK_FORMAT_R16G16_UNORM: return GrColorType::kRG_1616 == colorType;
38 case VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM: return GrColorType::kRGB_888x == colorType;
39 case VK_FORMAT_G8_B8R8_2PLANE_420_UNORM: return GrColorType::kRGB_888x == colorType;
40 // Experimental (for Y416 and mutant P016/P010)
41 case VK_FORMAT_R16G16B16A16_UNORM: return GrColorType::kRGBA_16161616 == colorType;
42 case VK_FORMAT_R16G16_SFLOAT: return GrColorType::kRG_F16 == colorType;
43 default: return false;
44 }
45
46 SkUNREACHABLE;
47 }
#endif

GrVkFormatIsSupported(VkFormat format)50 bool GrVkFormatIsSupported(VkFormat format) {
51 switch (format) {
52 case VK_FORMAT_R8G8B8A8_UNORM:
53 case VK_FORMAT_B8G8R8A8_UNORM:
54 case VK_FORMAT_R8G8B8A8_SRGB:
55 case VK_FORMAT_R8G8B8_UNORM:
56 case VK_FORMAT_R8G8_UNORM:
57 case VK_FORMAT_A2B10G10R10_UNORM_PACK32:
58 case VK_FORMAT_R5G6B5_UNORM_PACK16:
59 case VK_FORMAT_B4G4R4A4_UNORM_PACK16:
60 case VK_FORMAT_R4G4B4A4_UNORM_PACK16:
61 case VK_FORMAT_R8_UNORM:
62 case VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK:
63 case VK_FORMAT_R32G32B32A32_SFLOAT:
64 case VK_FORMAT_R16G16B16A16_SFLOAT:
65 case VK_FORMAT_R16_SFLOAT:
66 case VK_FORMAT_R16_UNORM:
67 case VK_FORMAT_R16G16_UNORM:
68 case VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM:
69 case VK_FORMAT_G8_B8R8_2PLANE_420_UNORM:
70 // Experimental (for Y416 and mutant P016/P010)
71 case VK_FORMAT_R16G16B16A16_UNORM:
72 case VK_FORMAT_R16G16_SFLOAT:
73 return true;
74 default:
75 return false;
76 }
77 }
78
GrVkFormatNeedsYcbcrSampler(VkFormat format)79 bool GrVkFormatNeedsYcbcrSampler(VkFormat format) {
80 return format == VK_FORMAT_G8_B8R8_2PLANE_420_UNORM ||
81 format == VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM;
82 }
83
GrSampleCountToVkSampleCount(uint32_t samples,VkSampleCountFlagBits * vkSamples)84 bool GrSampleCountToVkSampleCount(uint32_t samples, VkSampleCountFlagBits* vkSamples) {
85 SkASSERT(samples >= 1);
86 switch (samples) {
87 case 1:
88 *vkSamples = VK_SAMPLE_COUNT_1_BIT;
89 return true;
90 case 2:
91 *vkSamples = VK_SAMPLE_COUNT_2_BIT;
92 return true;
93 case 4:
94 *vkSamples = VK_SAMPLE_COUNT_4_BIT;
95 return true;
96 case 8:
97 *vkSamples = VK_SAMPLE_COUNT_8_BIT;
98 return true;
99 case 16:
100 *vkSamples = VK_SAMPLE_COUNT_16_BIT;
101 return true;
102 case 32:
103 *vkSamples = VK_SAMPLE_COUNT_32_BIT;
104 return true;
105 case 64:
106 *vkSamples = VK_SAMPLE_COUNT_64_BIT;
107 return true;
108 default:
109 return false;
110 }
111 }
112
vk_shader_stage_to_skiasl_kind(VkShaderStageFlagBits stage)113 SkSL::Program::Kind vk_shader_stage_to_skiasl_kind(VkShaderStageFlagBits stage) {
114 if (VK_SHADER_STAGE_VERTEX_BIT == stage) {
115 return SkSL::Program::kVertex_Kind;
116 }
117 if (VK_SHADER_STAGE_GEOMETRY_BIT == stage) {
118 return SkSL::Program::kGeometry_Kind;
119 }
120 SkASSERT(VK_SHADER_STAGE_FRAGMENT_BIT == stage);
121 return SkSL::Program::kFragment_Kind;
122 }
123
GrCompileVkShaderModule(const GrVkGpu * gpu,const SkSL::String & shaderString,VkShaderStageFlagBits stage,VkShaderModule * shaderModule,VkPipelineShaderStageCreateInfo * stageInfo,const SkSL::Program::Settings & settings,SkSL::String * outSPIRV,SkSL::Program::Inputs * outInputs)124 bool GrCompileVkShaderModule(const GrVkGpu* gpu,
125 const SkSL::String& shaderString,
126 VkShaderStageFlagBits stage,
127 VkShaderModule* shaderModule,
128 VkPipelineShaderStageCreateInfo* stageInfo,
129 const SkSL::Program::Settings& settings,
130 SkSL::String* outSPIRV,
131 SkSL::Program::Inputs* outInputs) {
132 auto errorHandler = gpu->getContext()->priv().getShaderErrorHandler();
133 std::unique_ptr<SkSL::Program> program = gpu->shaderCompiler()->convertProgram(
134 vk_shader_stage_to_skiasl_kind(stage), shaderString, settings);
135 if (!program) {
136 errorHandler->compileError(shaderString.c_str(),
137 gpu->shaderCompiler()->errorText().c_str());
138 return false;
139 }
140 *outInputs = program->fInputs;
141 if (!gpu->shaderCompiler()->toSPIRV(*program, outSPIRV)) {
142 errorHandler->compileError(shaderString.c_str(),
143 gpu->shaderCompiler()->errorText().c_str());
144 return false;
145 }
146
147 return GrInstallVkShaderModule(gpu, *outSPIRV, stage, shaderModule, stageInfo);
148 }
149
GrInstallVkShaderModule(const GrVkGpu * gpu,const SkSL::String & spirv,VkShaderStageFlagBits stage,VkShaderModule * shaderModule,VkPipelineShaderStageCreateInfo * stageInfo)150 bool GrInstallVkShaderModule(const GrVkGpu* gpu,
151 const SkSL::String& spirv,
152 VkShaderStageFlagBits stage,
153 VkShaderModule* shaderModule,
154 VkPipelineShaderStageCreateInfo* stageInfo) {
155 VkShaderModuleCreateInfo moduleCreateInfo;
156 memset(&moduleCreateInfo, 0, sizeof(VkShaderModuleCreateInfo));
157 moduleCreateInfo.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
158 moduleCreateInfo.pNext = nullptr;
159 moduleCreateInfo.flags = 0;
160 moduleCreateInfo.codeSize = spirv.size();
161 moduleCreateInfo.pCode = (const uint32_t*)spirv.c_str();
162
163 VkResult err = GR_VK_CALL(gpu->vkInterface(), CreateShaderModule(gpu->device(),
164 &moduleCreateInfo,
165 nullptr,
166 shaderModule));
167 if (err) {
168 return false;
169 }
170
171 memset(stageInfo, 0, sizeof(VkPipelineShaderStageCreateInfo));
172 stageInfo->sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
173 stageInfo->pNext = nullptr;
174 stageInfo->flags = 0;
175 stageInfo->stage = stage;
176 stageInfo->module = *shaderModule;
177 stageInfo->pName = "main";
178 stageInfo->pSpecializationInfo = nullptr;
179
180 return true;
181 }
182
GrVkBytesPerFormat(VkFormat vkFormat)183 size_t GrVkBytesPerFormat(VkFormat vkFormat) {
184 switch (vkFormat) {
185 case VK_FORMAT_R8_UNORM:
186 return 1;
187
188 case VK_FORMAT_R5G6B5_UNORM_PACK16:
189 case VK_FORMAT_R4G4B4A4_UNORM_PACK16:
190 case VK_FORMAT_B4G4R4A4_UNORM_PACK16:
191 case VK_FORMAT_R8G8_UNORM:
192 case VK_FORMAT_R16_SFLOAT:
193 case VK_FORMAT_R16_UNORM:
194 return 2;
195
196 case VK_FORMAT_R8G8B8_UNORM:
197 return 3;
198
199 case VK_FORMAT_R8G8B8A8_UNORM:
200 case VK_FORMAT_R8G8B8A8_SRGB:
201 case VK_FORMAT_B8G8R8A8_UNORM:
202 case VK_FORMAT_A2B10G10R10_UNORM_PACK32:
203 case VK_FORMAT_R16G16_UNORM:
204 return 4;
205
206 case VK_FORMAT_R16G16B16A16_SFLOAT:
207 return 8;
208
209 case VK_FORMAT_R32G32B32A32_SFLOAT:
210 return 16;
211
212 case VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK:
213 return 0;
214
215 // Experimental (for Y416 and mutant P016/P010)
216 case VK_FORMAT_R16G16B16A16_UNORM:
217 return 8;
218 case VK_FORMAT_R16G16_SFLOAT:
219 return 4;
220
221 default:
222 SK_ABORT("Invalid Vk format");
223 }
224
225 SK_ABORT("Invalid Vk format");
226 }
227
GrVkFormatIsCompressed(VkFormat vkFormat)228 bool GrVkFormatIsCompressed(VkFormat vkFormat) {
229 switch (vkFormat) {
230 case VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK:
231 return true;
232 default:
233 return false;
234 }
235 }
236
GrVkFormatToCompressionType(VkFormat vkFormat,SkImage::CompressionType * compressionType)237 bool GrVkFormatToCompressionType(VkFormat vkFormat, SkImage::CompressionType* compressionType) {
238 switch (vkFormat) {
239 case VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK:
240 *compressionType = SkImage::kETC1_CompressionType;
241 return true;
242 default:
243 return false;
244 }
245 }
246