/*
 * Copyright 2022 Google LLC
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "src/gpu/graphite/vk/VulkanGraphiteUtils.h"

#include "include/core/SkStream.h"
#include "include/gpu/ShaderErrorHandler.h"
#include "include/gpu/graphite/Context.h"
#include "include/gpu/vk/VulkanBackendContext.h"
#include "src/core/SkTraceEvent.h"
#include "src/gpu/graphite/ContextPriv.h"
#include "src/gpu/graphite/RenderPassDesc.h"
#include "src/gpu/graphite/TextureFormat.h"
#include "src/gpu/graphite/vk/VulkanQueueManager.h"
#include "src/gpu/graphite/vk/VulkanSampler.h"
#include "src/gpu/graphite/vk/VulkanSharedContext.h"
#include "src/sksl/SkSLProgramSettings.h"

namespace skgpu::graphite::ContextFactory {

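// Entry point for clients creating a Vulkan-backed Graphite Context: wraps the client-provided
// VulkanBackendContext in a VulkanSharedContext and a VulkanQueueManager, then hands both to the
// generic Context constructor.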
std::unique_ptr<Context> MakeVulkan(const VulkanBackendContext& backendContext,
                                    const ContextOptions& options) {
    sk_sp<SharedContext> sharedContext = VulkanSharedContext::Make(backendContext, options);
    if (!sharedContext) {
        return nullptr;
    }

    std::unique_ptr<QueueManager> queueManager(new VulkanQueueManager(backendContext.fQueue,
                                                                      sharedContext.get()));
    if (!queueManager) {
        return nullptr;
    }

    return ContextCtorAccessor::MakeContext(std::move(sharedContext),
                                            std::move(queueManager),
                                            options);
}

} // namespace skgpu::graphite::ContextFactory

namespace skgpu::graphite {

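// Wraps compiled SPIR-V in a VkShaderModule. Note that VkShaderModuleCreateInfo::codeSize is
// specified in bytes (not 32-bit words) and must be a non-zero multiple of four; since the SPIR-V
// binary arrives here as a byte string of whole words, spirv.size() satisfies that requirement.
// Returns VK_NULL_HANDLE on failure.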
VkShaderModule createVulkanShaderModule(const VulkanSharedContext* context,
                                        const std::string& spirv,
                                        VkShaderStageFlagBits stage) {
    TRACE_EVENT0("skia.shaders", "InstallVkShaderModule");
    VkShaderModuleCreateInfo moduleCreateInfo;
    memset(&moduleCreateInfo, 0, sizeof(VkShaderModuleCreateInfo));
    moduleCreateInfo.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
    moduleCreateInfo.pNext = nullptr;
    moduleCreateInfo.flags = 0;
    moduleCreateInfo.codeSize = spirv.size();
    moduleCreateInfo.pCode = (const uint32_t*)spirv.c_str();

    VkShaderModule shaderModule;
    VkResult result;
    VULKAN_CALL_RESULT(context,
                       result,
                       CreateShaderModule(context->device(),
                                          &moduleCreateInfo,
                                          /*const VkAllocationCallbacks*=*/nullptr,
                                          &shaderModule));
    if (result != VK_SUCCESS) {
        SKGPU_LOG_E("Failed to create VkShaderModule");
        return VK_NULL_HANDLE;
    }
    return shaderModule;
}

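// Translates Graphite's backend-agnostic DescriptorData records into a VkDescriptorSetLayout.
// On failure the error is logged and *outLayout is set to VK_NULL_HANDLE so callers can detect
// the invalid layout.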
void DescriptorDataToVkDescSetLayout(const VulkanSharedContext* ctxt,
                                     const SkSpan<DescriptorData>& requestedDescriptors,
                                     VkDescriptorSetLayout* outLayout) {
    // If requestedDescriptors is empty, that simply means we should create an empty placeholder
    // layout that doesn't actually contain any descriptors.
    skia_private::STArray<kDescriptorTypeCount, VkDescriptorSetLayoutBinding> bindingLayouts;
    for (size_t i = 0; i < requestedDescriptors.size(); i++) {
        if (requestedDescriptors[i].fCount != 0) {
            const DescriptorData& currDescriptor = requestedDescriptors[i];
            VkDescriptorSetLayoutBinding& layoutBinding = bindingLayouts.push_back();
            memset(&layoutBinding, 0, sizeof(VkDescriptorSetLayoutBinding));
            layoutBinding.binding = currDescriptor.fBindingIndex;
            layoutBinding.descriptorType = DsTypeEnumToVkDs(currDescriptor.fType);
            layoutBinding.descriptorCount = currDescriptor.fCount;
            layoutBinding.stageFlags =
                    PipelineStageFlagsToVkShaderStageFlags(currDescriptor.fPipelineStageFlags);
            layoutBinding.pImmutableSamplers = currDescriptor.fImmutableSampler
                    ? (static_cast<const VulkanSampler*>(
                              currDescriptor.fImmutableSampler))->constVkSamplerPtr()
                    : nullptr;
        }
    }

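    // A bindingCount of zero is legitimate here; Vulkan accepts it and produces the empty
    // placeholder layout described above.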
    VkDescriptorSetLayoutCreateInfo layoutCreateInfo;
    memset(&layoutCreateInfo, 0, sizeof(VkDescriptorSetLayoutCreateInfo));
    layoutCreateInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
    layoutCreateInfo.pNext = nullptr;
    layoutCreateInfo.flags = 0;
    layoutCreateInfo.bindingCount = bindingLayouts.size();
    layoutCreateInfo.pBindings = bindingLayouts.data();

    VkResult result;
    VULKAN_CALL_RESULT(
            ctxt,
            result,
            CreateDescriptorSetLayout(ctxt->device(), &layoutCreateInfo, nullptr, outLayout));
    if (result != VK_SUCCESS) {
        SkDebugf("Failed to create VkDescriptorSetLayout\n");
        // Null out the caller's handle (not the local pointer, which would be a no-op for the
        // caller) so the failure is observable.
        *outLayout = VK_NULL_HANDLE;
    }
}

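// Maps Graphite's DescriptorType enum to the corresponding VkDescriptorType. Uniform and storage
// buffers map to the *_DYNAMIC variants, which allow buffer offsets to be supplied at
// vkCmdBindDescriptorSets time rather than being baked into the descriptor set.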
VkDescriptorType DsTypeEnumToVkDs(DescriptorType type) {
    switch (type) {
        case DescriptorType::kUniformBuffer:
            return VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC;
        case DescriptorType::kTextureSampler:
            return VK_DESCRIPTOR_TYPE_SAMPLER;
        case DescriptorType::kTexture:
            return VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE;
        case DescriptorType::kCombinedTextureSampler:
            return VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
        case DescriptorType::kStorageBuffer:
            return VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC;
        case DescriptorType::kInputAttachment:
            return VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT;
    }
    SkUNREACHABLE;
}

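// Maps a VkFormat to Graphite's TextureFormat, returning kUnsupported for anything unrecognized.
// The apparent channel swap for packed 16-bit formats (e.g. VK_FORMAT_R5G6B5_UNORM_PACK16 ->
// kB5_G6_R5) is intentional: Vulkan names packed formats from most- to least-significant bits,
// while TextureFormat's packed names read from least- to most-significant (compare kABGR4 and
// kRGB10_A2 below), so the mappings agree on in-memory layout.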
TextureFormat VkFormatToTextureFormat(VkFormat format) {
    switch (format) {
        case VK_FORMAT_R8_UNORM:                   return TextureFormat::kR8;
        case VK_FORMAT_R16_UNORM:                  return TextureFormat::kR16;
        case VK_FORMAT_R16_SFLOAT:                 return TextureFormat::kR16F;
        case VK_FORMAT_R32_SFLOAT:                 return TextureFormat::kR32F;
        case VK_FORMAT_R8G8_UNORM:                 return TextureFormat::kRG8;
        case VK_FORMAT_R16G16_UNORM:               return TextureFormat::kRG16;
        case VK_FORMAT_R16G16_SFLOAT:              return TextureFormat::kRG16F;
        case VK_FORMAT_R32G32_SFLOAT:              return TextureFormat::kRG32F;
        case VK_FORMAT_R8G8B8_UNORM:               return TextureFormat::kRGB8;
        case VK_FORMAT_B8G8R8_UNORM:               return TextureFormat::kBGR8;
        case VK_FORMAT_R5G6B5_UNORM_PACK16:        return TextureFormat::kB5_G6_R5;
        case VK_FORMAT_B5G6R5_UNORM_PACK16:        return TextureFormat::kR5_G6_B5;
        case VK_FORMAT_R16G16B16_UNORM:            return TextureFormat::kRGB16;
        case VK_FORMAT_R16G16B16_SFLOAT:           return TextureFormat::kRGB16F;
        case VK_FORMAT_R32G32B32_SFLOAT:           return TextureFormat::kRGB32F;
        case VK_FORMAT_R8G8B8_SRGB:                return TextureFormat::kRGB8_sRGB;
        case VK_FORMAT_R8G8B8A8_UNORM:             return TextureFormat::kRGBA8;
        case VK_FORMAT_A8B8G8R8_UNORM_PACK32:      return TextureFormat::kRGBA8;
        case VK_FORMAT_R16G16B16A16_UNORM:         return TextureFormat::kRGBA16;
        case VK_FORMAT_R16G16B16A16_SFLOAT:        return TextureFormat::kRGBA16F;
        case VK_FORMAT_R32G32B32A32_SFLOAT:        return TextureFormat::kRGBA32F;
        case VK_FORMAT_A2B10G10R10_UNORM_PACK32:   return TextureFormat::kRGB10_A2;
        case VK_FORMAT_R8G8B8A8_SRGB:              return TextureFormat::kRGBA8_sRGB;
        case VK_FORMAT_B8G8R8A8_UNORM:             return TextureFormat::kBGRA8;
        case VK_FORMAT_A2R10G10B10_UNORM_PACK32:   return TextureFormat::kBGR10_A2;
        case VK_FORMAT_B8G8R8A8_SRGB:              return TextureFormat::kBGRA8_sRGB;
        case VK_FORMAT_R4G4B4A4_UNORM_PACK16:      return TextureFormat::kABGR4;
        case VK_FORMAT_B4G4R4A4_UNORM_PACK16:      return TextureFormat::kARGB4;
        case VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK:    return TextureFormat::kRGB8_ETC2;
        case VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK:     return TextureFormat::kRGB8_ETC2_sRGB;
        case VK_FORMAT_BC1_RGB_UNORM_BLOCK:        return TextureFormat::kRGB8_BC1;
        case VK_FORMAT_BC1_RGBA_UNORM_BLOCK:       return TextureFormat::kRGBA8_BC1;
        case VK_FORMAT_BC1_RGBA_SRGB_BLOCK:        return TextureFormat::kRGBA8_BC1_sRGB;
        case VK_FORMAT_G8_B8R8_2PLANE_420_UNORM:   return TextureFormat::kYUV8_P2_420;
        case VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM:  return TextureFormat::kYUV8_P3_420;
        case VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16:
                                                   return TextureFormat::kYUV10x6_P2_420;
        case VK_FORMAT_S8_UINT:                    return TextureFormat::kS8;
        case VK_FORMAT_D16_UNORM:                  return TextureFormat::kD16;
        case VK_FORMAT_D32_SFLOAT:                 return TextureFormat::kD32F;
        case VK_FORMAT_D24_UNORM_S8_UINT:          return TextureFormat::kD24_S8;
        case VK_FORMAT_D32_SFLOAT_S8_UINT:         return TextureFormat::kD32F_S8;
        default:                                   return TextureFormat::kUnsupported;
    }
}

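// Converts Graphite's PipelineStageFlags bitmask into the equivalent VkShaderStageFlags bitmask.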
VkShaderStageFlags PipelineStageFlagsToVkShaderStageFlags(
        SkEnumBitMask<PipelineStageFlags> stageFlags) {
    VkShaderStageFlags vkStageFlags = 0;
    if (stageFlags & PipelineStageFlags::kVertexShader) {
        vkStageFlags |= VK_SHADER_STAGE_VERTEX_BIT;
    }
    if (stageFlags & PipelineStageFlags::kFragmentShader) {
        vkStageFlags |= VK_SHADER_STAGE_FRAGMENT_BIT;
    }
    if (stageFlags & PipelineStageFlags::kCompute) {
        vkStageFlags |= VK_SHADER_STAGE_COMPUTE_BIT;
    }
    return vkStageFlags;
}

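// True when the render pass has a valid color resolve attachment marked LoadOp::kLoad, i.e. the
// multisampled color attachment's contents need to be seeded from the previously-resolved
// single-sample texture at the start of the pass.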
bool RenderPassDescWillLoadMSAAFromResolve(const RenderPassDesc& renderPassDesc) {
    return renderPassDesc.fColorResolveAttachment.fTextureInfo.isValid() &&
           renderPassDesc.fColorResolveAttachment.fLoadOp == LoadOp::kLoad;
}

} // namespace skgpu::graphite