/*
 * Copyright 2015 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "src/gpu/vk/GrVkUtil.h"

#include "include/gpu/GrDirectContext.h"
#include "src/core/SkTraceEvent.h"
#include "src/gpu/GrDataUtils.h"
#include "src/gpu/GrDirectContextPriv.h"
#include "src/gpu/vk/GrVkGpu.h"
#include "src/sksl/SkSLCompiler.h"
#include "hitrace_meter.h"
17
GrVkFormatIsSupported(VkFormat format)18 bool GrVkFormatIsSupported(VkFormat format) {
19 switch (format) {
20 case VK_FORMAT_R8G8B8A8_UNORM:
21 case VK_FORMAT_B8G8R8A8_UNORM:
22 case VK_FORMAT_R8G8B8A8_SRGB:
23 case VK_FORMAT_R8G8B8_UNORM:
24 case VK_FORMAT_R8G8_UNORM:
25 case VK_FORMAT_A2B10G10R10_UNORM_PACK32:
26 case VK_FORMAT_A2R10G10B10_UNORM_PACK32:
27 case VK_FORMAT_R5G6B5_UNORM_PACK16:
28 case VK_FORMAT_B4G4R4A4_UNORM_PACK16:
29 case VK_FORMAT_R4G4B4A4_UNORM_PACK16:
30 case VK_FORMAT_R8_UNORM:
31 case VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK:
32 case VK_FORMAT_BC1_RGB_UNORM_BLOCK:
33 case VK_FORMAT_BC1_RGBA_UNORM_BLOCK:
34 case VK_FORMAT_ASTC_4x4_UNORM_BLOCK:
35 case VK_FORMAT_ASTC_6x6_UNORM_BLOCK:
36 case VK_FORMAT_ASTC_8x8_UNORM_BLOCK:
37 case VK_FORMAT_R16G16B16A16_SFLOAT:
38 case VK_FORMAT_R16_SFLOAT:
39 case VK_FORMAT_R16_UNORM:
40 case VK_FORMAT_R16G16_UNORM:
41 case VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM:
42 case VK_FORMAT_G8_B8R8_2PLANE_420_UNORM:
43 case VK_FORMAT_R16G16B16A16_UNORM:
44 case VK_FORMAT_R16G16_SFLOAT:
45 case VK_FORMAT_S8_UINT:
46 case VK_FORMAT_D24_UNORM_S8_UINT:
47 case VK_FORMAT_D32_SFLOAT_S8_UINT:
48 return true;
49 default:
50 return false;
51 }
52 }
53
GrVkFormatNeedsYcbcrSampler(VkFormat format)54 bool GrVkFormatNeedsYcbcrSampler(VkFormat format) {
55 return format == VK_FORMAT_G8_B8R8_2PLANE_420_UNORM ||
56 format == VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM;
57 }
58
GrSampleCountToVkSampleCount(uint32_t samples,VkSampleCountFlagBits * vkSamples)59 bool GrSampleCountToVkSampleCount(uint32_t samples, VkSampleCountFlagBits* vkSamples) {
60 SkASSERT(samples >= 1);
61 switch (samples) {
62 case 1:
63 *vkSamples = VK_SAMPLE_COUNT_1_BIT;
64 return true;
65 case 2:
66 *vkSamples = VK_SAMPLE_COUNT_2_BIT;
67 return true;
68 case 4:
69 *vkSamples = VK_SAMPLE_COUNT_4_BIT;
70 return true;
71 case 8:
72 *vkSamples = VK_SAMPLE_COUNT_8_BIT;
73 return true;
74 case 16:
75 *vkSamples = VK_SAMPLE_COUNT_16_BIT;
76 return true;
77 default:
78 return false;
79 }
80 }
81
vk_shader_stage_to_skiasl_kind(VkShaderStageFlagBits stage)82 SkSL::ProgramKind vk_shader_stage_to_skiasl_kind(VkShaderStageFlagBits stage) {
83 if (VK_SHADER_STAGE_VERTEX_BIT == stage) {
84 return SkSL::ProgramKind::kVertex;
85 }
86 SkASSERT(VK_SHADER_STAGE_FRAGMENT_BIT == stage);
87 return SkSL::ProgramKind::kFragment;
88 }
89
GrCompileVkShaderModule(GrVkGpu * gpu,const SkSL::String & shaderString,VkShaderStageFlagBits stage,VkShaderModule * shaderModule,VkPipelineShaderStageCreateInfo * stageInfo,const SkSL::Program::Settings & settings,SkSL::String * outSPIRV,SkSL::Program::Inputs * outInputs)90 bool GrCompileVkShaderModule(GrVkGpu* gpu,
91 const SkSL::String& shaderString,
92 VkShaderStageFlagBits stage,
93 VkShaderModule* shaderModule,
94 VkPipelineShaderStageCreateInfo* stageInfo,
95 const SkSL::Program::Settings& settings,
96 SkSL::String* outSPIRV,
97 SkSL::Program::Inputs* outInputs) {
98 #ifndef SKIA_OHOS_DEBUG
99 SKIA_OHOS_TRACE_PRIV("skia.shaders", "OHOS_CompileSpriV");
100 #endif
101 TRACE_EVENT0("skia.shaders", "CompileVkShaderModule");
102 auto errorHandler = gpu->getContext()->priv().getShaderErrorHandler();
103 std::unique_ptr<SkSL::Program> program = gpu->shaderCompiler()->convertProgram(
104 vk_shader_stage_to_skiasl_kind(stage), shaderString, settings);
105 if (!program) {
106 errorHandler->compileError(shaderString.c_str(),
107 gpu->shaderCompiler()->errorText().c_str());
108 return false;
109 }
110 *outInputs = program->fInputs;
111 if (!gpu->shaderCompiler()->toSPIRV(*program, outSPIRV)) {
112 errorHandler->compileError(shaderString.c_str(),
113 gpu->shaderCompiler()->errorText().c_str());
114 return false;
115 }
116
117 return GrInstallVkShaderModule(gpu, *outSPIRV, stage, shaderModule, stageInfo);
118 }
119
GrInstallVkShaderModule(GrVkGpu * gpu,const SkSL::String & spirv,VkShaderStageFlagBits stage,VkShaderModule * shaderModule,VkPipelineShaderStageCreateInfo * stageInfo)120 bool GrInstallVkShaderModule(GrVkGpu* gpu,
121 const SkSL::String& spirv,
122 VkShaderStageFlagBits stage,
123 VkShaderModule* shaderModule,
124 VkPipelineShaderStageCreateInfo* stageInfo) {
125 TRACE_EVENT0("skia.shaders", "InstallVkShaderModule");
126 VkShaderModuleCreateInfo moduleCreateInfo;
127 memset(&moduleCreateInfo, 0, sizeof(VkShaderModuleCreateInfo));
128 moduleCreateInfo.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
129 moduleCreateInfo.pNext = nullptr;
130 moduleCreateInfo.flags = 0;
131 moduleCreateInfo.codeSize = spirv.size();
132 moduleCreateInfo.pCode = (const uint32_t*)spirv.c_str();
133
134 VkResult err;
135 GR_VK_CALL_RESULT(gpu, err, CreateShaderModule(gpu->device(), &moduleCreateInfo, nullptr,
136 shaderModule));
137 if (err) {
138 return false;
139 }
140
141 memset(stageInfo, 0, sizeof(VkPipelineShaderStageCreateInfo));
142 stageInfo->sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
143 stageInfo->pNext = nullptr;
144 stageInfo->flags = 0;
145 stageInfo->stage = stage;
146 stageInfo->module = *shaderModule;
147 stageInfo->pName = "main";
148 stageInfo->pSpecializationInfo = nullptr;
149
150 return true;
151 }
152
GrVkFormatIsCompressed(VkFormat vkFormat)153 bool GrVkFormatIsCompressed(VkFormat vkFormat) {
154 switch (vkFormat) {
155 case VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK:
156 case VK_FORMAT_BC1_RGB_UNORM_BLOCK:
157 case VK_FORMAT_BC1_RGBA_UNORM_BLOCK:
158 case VK_FORMAT_ASTC_4x4_UNORM_BLOCK:
159 case VK_FORMAT_ASTC_6x6_UNORM_BLOCK:
160 case VK_FORMAT_ASTC_8x8_UNORM_BLOCK:
161 return true;
162 default:
163 return false;
164 }
165 SkUNREACHABLE;
166 }
167