/*
 * Copyright 2022 Google LLC
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "include/gpu/graphite/vk/VulkanGraphiteUtils.h"
#include "src/gpu/graphite/vk/VulkanGraphiteUtilsPriv.h"

#include "include/gpu/ShaderErrorHandler.h"
#include "include/gpu/graphite/Context.h"
#include "include/gpu/vk/VulkanBackendContext.h"
#include "src/core/SkTraceEvent.h"
#include "src/gpu/graphite/ContextPriv.h"
#include "src/gpu/graphite/vk/VulkanQueueManager.h"
#include "src/gpu/graphite/vk/VulkanSharedContext.h"
#include "src/sksl/SkSLProgramSettings.h"

namespace skgpu::graphite::ContextFactory {

std::unique_ptr<Context> MakeVulkan(const VulkanBackendContext& backendContext,
                                    const ContextOptions& options) {
    sk_sp<SharedContext> sharedContext = VulkanSharedContext::Make(backendContext, options);
    if (!sharedContext) {
        return nullptr;
    }

    std::unique_ptr<QueueManager> queueManager(new VulkanQueueManager(backendContext.fQueue,
                                                                      sharedContext.get()));
    if (!queueManager) {
        return nullptr;
    }

    return ContextCtorAccessor::MakeContext(std::move(sharedContext),
                                            std::move(queueManager),
                                            options);
}
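
// A minimal usage sketch (illustrative, not part of this file): a client that
// already owns a Vulkan device fills out a VulkanBackendContext and asks the
// factory for a Context. The fields shown are assumed from
// include/gpu/vk/VulkanBackendContext.h and must all be valid handles.
//
//     skgpu::VulkanBackendContext backendContext;
//     backendContext.fInstance           = instance;
//     backendContext.fPhysicalDevice     = physicalDevice;
//     backendContext.fDevice             = device;
//     backendContext.fQueue              = graphicsQueue;
//     backendContext.fGraphicsQueueIndex = graphicsQueueIndex;
//     backendContext.fGetProc            = getProc;
//
//     std::unique_ptr<skgpu::graphite::Context> context =
//             skgpu::graphite::ContextFactory::MakeVulkan(backendContext,
//                                                         skgpu::graphite::ContextOptions{});
//     // context is null if VulkanSharedContext::Make rejected the device.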

} // namespace skgpu::graphite::ContextFactory

namespace skgpu::graphite {

VkShaderModule createVulkanShaderModule(const VulkanSharedContext* context,
                                        const std::string& spirv,
                                        VkShaderStageFlagBits stage) {
    TRACE_EVENT0("skia.shaders", "InstallVkShaderModule");
    VkShaderModuleCreateInfo moduleCreateInfo;
    memset(&moduleCreateInfo, 0, sizeof(VkShaderModuleCreateInfo));
    moduleCreateInfo.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
    moduleCreateInfo.pNext = nullptr;
    moduleCreateInfo.flags = 0;
    moduleCreateInfo.codeSize = spirv.size();
    moduleCreateInfo.pCode = (const uint32_t*)spirv.c_str();

    VkShaderModule shaderModule;
    VkResult result;
    VULKAN_CALL_RESULT(context,
                       result,
                       CreateShaderModule(context->device(),
                                          &moduleCreateInfo,
                                          /*const VkAllocationCallbacks*=*/nullptr,
                                          &shaderModule));
    if (result != VK_SUCCESS) {
        SKGPU_LOG_E("Failed to create VkShaderModule");
        return VK_NULL_HANDLE;
    }
    return shaderModule;
}
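
// Ownership sketch (assumptions noted): the returned VkShaderModule belongs to
// the caller and should be destroyed once the pipeline consuming it has been
// built. VULKAN_CALL is assumed from VulkanGraphiteUtilsPriv.h.
//
//     VkShaderModule module = createVulkanShaderModule(sharedContext,
//                                                      spirv,
//                                                      VK_SHADER_STAGE_VERTEX_BIT);
//     if (module != VK_NULL_HANDLE) {
//         // ... reference the module from VkPipelineShaderStageCreateInfo ...
//         VULKAN_CALL(sharedContext->interface(),
//                     DestroyShaderModule(sharedContext->device(), module, nullptr));
//     }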

void DescriptorDataToVkDescSetLayout(const VulkanSharedContext* ctxt,
                                     const SkSpan<DescriptorData>& requestedDescriptors,
                                     VkDescriptorSetLayout* outLayout) {
    skia_private::STArray<kDescriptorTypeCount, VkDescriptorSetLayoutBinding> bindingLayouts;
    for (size_t i = 0; i < requestedDescriptors.size(); i++) {
        if (requestedDescriptors[i].fCount != 0) {
            const DescriptorData& currDescriptor = requestedDescriptors[i];
            VkDescriptorSetLayoutBinding& layoutBinding = bindingLayouts.push_back();
            memset(&layoutBinding, 0, sizeof(VkDescriptorSetLayoutBinding));
            layoutBinding.binding = currDescriptor.fBindingIndex;
            layoutBinding.descriptorType = DsTypeEnumToVkDs(currDescriptor.fType);
            layoutBinding.descriptorCount = currDescriptor.fCount;
            layoutBinding.stageFlags =
                    PipelineStageFlagsToVkShaderStageFlags(currDescriptor.fPipelineStageFlags);
            // TODO(b/302126498): Set pImmutableSamplers to currDescriptor.fImmutableSampler once
            // immutable samplers are created and used within graphite.
            layoutBinding.pImmutableSamplers = nullptr;
        }
    }

    VkDescriptorSetLayoutCreateInfo layoutCreateInfo;
    memset(&layoutCreateInfo, 0, sizeof(VkDescriptorSetLayoutCreateInfo));
    layoutCreateInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
    layoutCreateInfo.pNext = nullptr;
    layoutCreateInfo.flags = 0;
    layoutCreateInfo.bindingCount = bindingLayouts.size();
    // Use data() rather than &front(): front() has undefined behavior when every
    // requested descriptor count is zero and the array is empty.
    layoutCreateInfo.pBindings = bindingLayouts.data();

    VkResult result;
    VULKAN_CALL_RESULT(
            ctxt,
            result,
            CreateDescriptorSetLayout(ctxt->device(), &layoutCreateInfo, nullptr, outLayout));
    if (result != VK_SUCCESS) {
        SKGPU_LOG_E("Failed to create VkDescriptorSetLayout");
        // Write through the out-param; assigning to the local pointer itself
        // would leave the caller's handle untouched.
        *outLayout = VK_NULL_HANDLE;
    }
}
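
// Usage sketch: building a layout for one dynamic uniform buffer visible to the
// vertex and fragment stages. The DescriptorData field names match the loop
// above; the aggregate initialization order shown is an assumption.
//
//     DescriptorData uniformDesc = {DescriptorType::kUniformBuffer,
//                                   /*count=*/1,
//                                   /*bindingIndex=*/0,
//                                   PipelineStageFlags::kVertexShader |
//                                           PipelineStageFlags::kFragmentShader};
//     VkDescriptorSetLayout layout = VK_NULL_HANDLE;
//     DescriptorDataToVkDescSetLayout(sharedContext, {&uniformDesc, 1}, &layout);
//     if (layout == VK_NULL_HANDLE) { /* creation failed */ }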

VkDescriptorType DsTypeEnumToVkDs(DescriptorType type) {
    switch (type) {
        case DescriptorType::kUniformBuffer:
            // Graphite binds uniform buffers with dynamic offsets, so the plain
            // kUniformBuffer type intentionally maps to the _DYNAMIC descriptor.
            return VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC;
        case DescriptorType::kTextureSampler:
            return VK_DESCRIPTOR_TYPE_SAMPLER;
        case DescriptorType::kTexture:
            return VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE;
        case DescriptorType::kCombinedTextureSampler:
            return VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
        case DescriptorType::kStorageBuffer:
            return VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
        case DescriptorType::kInputAttachment:
            return VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT;
    }
    SkUNREACHABLE;
}

bool vkFormatIsSupported(VkFormat format) {
    switch (format) {
        case VK_FORMAT_R8G8B8A8_UNORM:
        case VK_FORMAT_B8G8R8A8_UNORM:
        case VK_FORMAT_R8G8B8A8_SRGB:
        case VK_FORMAT_R8G8B8_UNORM:
        case VK_FORMAT_R8G8_UNORM:
        case VK_FORMAT_A2B10G10R10_UNORM_PACK32:
        case VK_FORMAT_A2R10G10B10_UNORM_PACK32:
        case VK_FORMAT_R5G6B5_UNORM_PACK16:
        case VK_FORMAT_B4G4R4A4_UNORM_PACK16:
        case VK_FORMAT_R4G4B4A4_UNORM_PACK16:
        case VK_FORMAT_R8_UNORM:
        case VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK:
        case VK_FORMAT_BC1_RGB_UNORM_BLOCK:
        case VK_FORMAT_BC1_RGBA_UNORM_BLOCK:
        case VK_FORMAT_R16G16B16A16_SFLOAT:
        case VK_FORMAT_R16_SFLOAT:
        case VK_FORMAT_R16_UNORM:
        case VK_FORMAT_R16G16_UNORM:
        case VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM:
        case VK_FORMAT_G8_B8R8_2PLANE_420_UNORM:
        case VK_FORMAT_R16G16B16A16_UNORM:
        case VK_FORMAT_R16G16_SFLOAT:
        case VK_FORMAT_S8_UINT:
        case VK_FORMAT_D16_UNORM:
        case VK_FORMAT_D32_SFLOAT:
        case VK_FORMAT_D24_UNORM_S8_UINT:
        case VK_FORMAT_D32_SFLOAT_S8_UINT:
            return true;
        default:
            return false;
    }
}
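
// Note: the switch above is a static allow-list of formats Graphite knows how
// to work with; a format passing this check must still be validated against
// the physical device (e.g. via vkGetPhysicalDeviceFormatProperties) before use.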

VkShaderStageFlags PipelineStageFlagsToVkShaderStageFlags(
        SkEnumBitMask<PipelineStageFlags> stageFlags) {
    VkShaderStageFlags vkStageFlags = 0;
    if (stageFlags & PipelineStageFlags::kVertexShader) {
        vkStageFlags |= VK_SHADER_STAGE_VERTEX_BIT;
    }
    if (stageFlags & PipelineStageFlags::kFragmentShader) {
        vkStageFlags |= VK_SHADER_STAGE_FRAGMENT_BIT;
    }
    if (stageFlags & PipelineStageFlags::kCompute) {
        vkStageFlags |= VK_SHADER_STAGE_COMPUTE_BIT;
    }
    return vkStageFlags;
}
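
// For example, a descriptor shared by the vertex and fragment stages maps as:
//     PipelineStageFlagsToVkShaderStageFlags(PipelineStageFlags::kVertexShader |
//                                            PipelineStageFlags::kFragmentShader)
//     == (VK_SHADER_STAGE_VERTEX_BIT | VK_SHADER_STAGE_FRAGMENT_BIT)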

namespace ycbcrPackaging {
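// Packs every non-format field of a VulkanYcbcrConversionInfo into a single
// uint32_t for use in cache keys. Field widths and positions come from the
// k*Bits/k*Shift constants declared in VulkanGraphiteUtilsPriv.h; the
// static_assert below only guarantees the full packing fits in 32 bits, with
// the component swizzles assumed to occupy the highest bits.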
uint32_t nonFormatInfoAsUInt32(const VulkanYcbcrConversionInfo& conversionInfo) {
    static_assert(kComponentAShift + kComponentBits <= 32);

    SkASSERT(conversionInfo.fYcbcrModel                  < (1u << kYcbcrModelBits        ));
    SkASSERT(conversionInfo.fYcbcrRange                  < (1u << kYcbcrRangeBits        ));
    SkASSERT(conversionInfo.fXChromaOffset               < (1u << kXChromaOffsetBits     ));
    SkASSERT(conversionInfo.fYChromaOffset               < (1u << kYChromaOffsetBits     ));
    SkASSERT(conversionInfo.fChromaFilter                < (1u << kChromaFilterBits      ));
    SkASSERT(conversionInfo.fForceExplicitReconstruction < (1u << kForceExplicitReconBits));
    SkASSERT(conversionInfo.fComponents.r                < (1u << kComponentBits         ));
    SkASSERT(conversionInfo.fComponents.g                < (1u << kComponentBits         ));
    SkASSERT(conversionInfo.fComponents.b                < (1u << kComponentBits         ));
    SkASSERT(conversionInfo.fComponents.a                < (1u << kComponentBits         ));

    bool usesExternalFormat = conversionInfo.fFormat == VK_FORMAT_UNDEFINED;

    return (((uint32_t)(usesExternalFormat                         ) << kUsesExternalFormatShift) |
            ((uint32_t)(conversionInfo.fYcbcrModel                 ) << kYcbcrModelShift        ) |
            ((uint32_t)(conversionInfo.fYcbcrRange                 ) << kYcbcrRangeShift        ) |
            ((uint32_t)(conversionInfo.fXChromaOffset              ) << kXChromaOffsetShift     ) |
            ((uint32_t)(conversionInfo.fYChromaOffset              ) << kYChromaOffsetShift     ) |
            ((uint32_t)(conversionInfo.fChromaFilter               ) << kChromaFilterShift      ) |
            ((uint32_t)(conversionInfo.fForceExplicitReconstruction) << kForceExplicitReconShift) |
            ((uint32_t)(conversionInfo.fComponents.r               ) << kComponentRShift        ) |
            ((uint32_t)(conversionInfo.fComponents.g               ) << kComponentGShift        ) |
            ((uint32_t)(conversionInfo.fComponents.b               ) << kComponentBShift        ) |
            ((uint32_t)(conversionInfo.fComponents.a               ) << kComponentAShift        ));
}

int numInt32sNeeded(const VulkanYcbcrConversionInfo& conversionInfo) {
    if (!conversionInfo.isValid()) {
        return 0;
    }
    return (conversionInfo.fFormat == VK_FORMAT_UNDEFINED) ? kInt32sNeededExternalFormat
                                                           : kInt32sNeededKnownFormat;
}
} // namespace ycbcrPackaging

} // namespace skgpu::graphite