/*
 * Copyright 2022 Google LLC
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "include/gpu/graphite/vk/VulkanGraphiteUtils.h"
#include "src/gpu/graphite/vk/VulkanGraphiteUtilsPriv.h"

#include "include/gpu/ShaderErrorHandler.h"
#include "include/gpu/graphite/Context.h"
#include "include/gpu/vk/VulkanBackendContext.h"
#include "src/core/SkTraceEvent.h"
#include "src/gpu/graphite/ContextPriv.h"
#include "src/gpu/graphite/vk/VulkanQueueManager.h"
#include "src/gpu/graphite/vk/VulkanSampler.h"
#include "src/gpu/graphite/vk/VulkanSharedContext.h"
#include "src/sksl/SkSLProgramSettings.h"

namespace skgpu::graphite::ContextFactory {

std::unique_ptr<Context> MakeVulkan(const VulkanBackendContext& backendContext,
                                    const ContextOptions& options) {
    sk_sp<SharedContext> sharedContext = VulkanSharedContext::Make(backendContext, options);
    if (!sharedContext) {
        return nullptr;
    }

    std::unique_ptr<QueueManager> queueManager(new VulkanQueueManager(backendContext.fQueue,
                                                                      sharedContext.get()));
    if (!queueManager) {
        return nullptr;
    }

    return ContextCtorAccessor::MakeContext(std::move(sharedContext),
                                            std::move(queueManager),
                                            options);
}

} // namespace skgpu::graphite::ContextFactory
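
// A minimal usage sketch, assuming the embedder has already created the Vulkan
// instance, device, and queue; names such as 'instance' and 'getProc' below are
// placeholders for objects the caller supplies. The Vulkan handles must outlive
// the returned Context.
//
//   skgpu::VulkanBackendContext backendContext;
//   backendContext.fInstance           = instance;
//   backendContext.fPhysicalDevice     = physicalDevice;
//   backendContext.fDevice             = device;
//   backendContext.fQueue              = graphicsQueue;
//   backendContext.fGraphicsQueueIndex = graphicsQueueIndex;
//   backendContext.fGetProc            = getProc;  // skgpu::VulkanGetProc
//
//   std::unique_ptr<skgpu::graphite::Context> context =
//           skgpu::graphite::ContextFactory::MakeVulkan(backendContext, ContextOptions{});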

namespace skgpu::graphite {

VkShaderModule createVulkanShaderModule(const VulkanSharedContext* context,
                                        const std::string& spirv,
                                        VkShaderStageFlagBits stage) {
    TRACE_EVENT0("skia.shaders", "InstallVkShaderModule");
    VkShaderModuleCreateInfo moduleCreateInfo;
    memset(&moduleCreateInfo, 0, sizeof(VkShaderModuleCreateInfo));
    moduleCreateInfo.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
    moduleCreateInfo.pNext = nullptr;
    moduleCreateInfo.flags = 0;
    // codeSize is in bytes even though pCode is read as uint32_t words; a valid
    // SPIR-V blob's size is always a multiple of four.
    moduleCreateInfo.codeSize = spirv.size();
    moduleCreateInfo.pCode = (const uint32_t*)spirv.c_str();

    VkShaderModule shaderModule;
    VkResult result;
    VULKAN_CALL_RESULT(context,
                       result,
                       CreateShaderModule(context->device(),
                                          &moduleCreateInfo,
                                          /*const VkAllocationCallbacks*=*/nullptr,
                                          &shaderModule));
    if (result != VK_SUCCESS) {
        SKGPU_LOG_E("Failed to create VkShaderModule");
        return VK_NULL_HANDLE;
    }
    return shaderModule;
}
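
// Note that the helper above never reads 'stage': a VkShaderModule is
// stage-agnostic, and the stage bit is consumed later, when the module is
// attached to a pipeline. A minimal sketch, assuming 'shaderModule' came from
// the helper above:
//
//   VkPipelineShaderStageCreateInfo stageInfo = {};
//   stageInfo.sType  = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
//   stageInfo.stage  = stage;           // e.g. VK_SHADER_STAGE_VERTEX_BIT
//   stageInfo.module = shaderModule;
//   stageInfo.pName  = "main";          // SPIR-V entry point name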

void DescriptorDataToVkDescSetLayout(const VulkanSharedContext* ctxt,
                                     const SkSpan<DescriptorData>& requestedDescriptors,
                                     VkDescriptorSetLayout* outLayout) {
    // If requestedDescriptors is empty, that simply means we should create an empty placeholder
    // layout that doesn't actually contain any descriptors.
    skia_private::STArray<kDescriptorTypeCount, VkDescriptorSetLayoutBinding> bindingLayouts;
    for (size_t i = 0; i < requestedDescriptors.size(); i++) {
        if (requestedDescriptors[i].fCount != 0) {
            const DescriptorData& currDescriptor = requestedDescriptors[i];
            VkDescriptorSetLayoutBinding& layoutBinding = bindingLayouts.push_back();
            memset(&layoutBinding, 0, sizeof(VkDescriptorSetLayoutBinding));
            layoutBinding.binding = currDescriptor.fBindingIndex;
            layoutBinding.descriptorType = DsTypeEnumToVkDs(currDescriptor.fType);
            layoutBinding.descriptorCount = currDescriptor.fCount;
            layoutBinding.stageFlags =
                    PipelineStageFlagsToVkShaderStageFlags(currDescriptor.fPipelineStageFlags);
            layoutBinding.pImmutableSamplers = currDescriptor.fImmutableSampler
                    ? (static_cast<const VulkanSampler*>(
                            currDescriptor.fImmutableSampler))->constVkSamplerPtr()
                    : nullptr;
        }
    }

    VkDescriptorSetLayoutCreateInfo layoutCreateInfo;
    memset(&layoutCreateInfo, 0, sizeof(VkDescriptorSetLayoutCreateInfo));
    layoutCreateInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
    layoutCreateInfo.pNext = nullptr;
    layoutCreateInfo.flags = 0;
    layoutCreateInfo.bindingCount = bindingLayouts.size();
    layoutCreateInfo.pBindings = bindingLayouts.data();

    VkResult result;
    VULKAN_CALL_RESULT(
            ctxt,
            result,
            CreateDescriptorSetLayout(ctxt->device(), &layoutCreateInfo, nullptr, outLayout));
    if (result != VK_SUCCESS) {
        SkDebugf("Failed to create VkDescriptorSetLayout\n");
        // Write through the out-param so the caller sees a null layout; assigning
        // to the pointer itself would only change the local copy.
        *outLayout = VK_NULL_HANDLE;
    }
}
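
// A minimal usage sketch for one dynamic uniform buffer visible to the vertex
// and fragment stages; the DescriptorData field order shown is illustrative.
// The caller owns the resulting layout and must destroy it when finished.
//
//   DescriptorData uniformDesc = {DescriptorType::kUniformBuffer,
//                                 /*count=*/1,
//                                 /*bindingIndex=*/0,
//                                 PipelineStageFlags::kVertexShader |
//                                         PipelineStageFlags::kFragmentShader};
//   VkDescriptorSetLayout layout;
//   DescriptorDataToVkDescSetLayout(ctxt, SkSpan(&uniformDesc, 1), &layout);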

VkDescriptorType DsTypeEnumToVkDs(DescriptorType type) {
    switch (type) {
        // Uniform and storage buffers map to the DYNAMIC descriptor types, which
        // allow a dynamic offset to be supplied at bind time.
        case DescriptorType::kUniformBuffer:
            return VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC;
        case DescriptorType::kTextureSampler:
            return VK_DESCRIPTOR_TYPE_SAMPLER;
        case DescriptorType::kTexture:
            return VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE;
        case DescriptorType::kCombinedTextureSampler:
            return VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
        case DescriptorType::kStorageBuffer:
            return VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC;
        case DescriptorType::kInputAttachment:
            return VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT;
    }
    SkUNREACHABLE;
}

bool vkFormatIsSupported(VkFormat format) {
    switch (format) {
        case VK_FORMAT_R8G8B8A8_UNORM:
        case VK_FORMAT_B8G8R8A8_UNORM:
        case VK_FORMAT_R8G8B8A8_SRGB:
        case VK_FORMAT_R8G8B8_UNORM:
        case VK_FORMAT_R8G8_UNORM:
        case VK_FORMAT_A2B10G10R10_UNORM_PACK32:
        case VK_FORMAT_A2R10G10B10_UNORM_PACK32:
        case VK_FORMAT_R5G6B5_UNORM_PACK16:
        case VK_FORMAT_B4G4R4A4_UNORM_PACK16:
        case VK_FORMAT_R4G4B4A4_UNORM_PACK16:
        case VK_FORMAT_R8_UNORM:
        case VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK:
        case VK_FORMAT_BC1_RGB_UNORM_BLOCK:
        case VK_FORMAT_BC1_RGBA_UNORM_BLOCK:
        case VK_FORMAT_ASTC_4x4_UNORM_BLOCK:
        case VK_FORMAT_ASTC_6x6_UNORM_BLOCK:
        case VK_FORMAT_ASTC_8x8_UNORM_BLOCK:
        case VK_FORMAT_R16G16B16A16_SFLOAT:
        case VK_FORMAT_R16_SFLOAT:
        case VK_FORMAT_R16_UNORM:
        case VK_FORMAT_R16G16_UNORM:
        case VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM:
        case VK_FORMAT_G8_B8R8_2PLANE_420_UNORM:
        case VK_FORMAT_R16G16B16A16_UNORM:
        case VK_FORMAT_R16G16_SFLOAT:
        case VK_FORMAT_S8_UINT:
        case VK_FORMAT_D16_UNORM:
        case VK_FORMAT_D32_SFLOAT:
        case VK_FORMAT_D24_UNORM_S8_UINT:
        case VK_FORMAT_D32_SFLOAT_S8_UINT:
            return true;
        default:
            return false;
    }
}

VkShaderStageFlags PipelineStageFlagsToVkShaderStageFlags(
        SkEnumBitMask<PipelineStageFlags> stageFlags) {
    VkShaderStageFlags vkStageFlags = 0;
    if (stageFlags & PipelineStageFlags::kVertexShader) {
        vkStageFlags |= VK_SHADER_STAGE_VERTEX_BIT;
    }
    if (stageFlags & PipelineStageFlags::kFragmentShader) {
        vkStageFlags |= VK_SHADER_STAGE_FRAGMENT_BIT;
    }
    if (stageFlags & PipelineStageFlags::kCompute) {
        vkStageFlags |= VK_SHADER_STAGE_COMPUTE_BIT;
    }
    return vkStageFlags;
}

namespace ycbcrPackaging {
uint32_t nonFormatInfoAsUInt32(const VulkanYcbcrConversionInfo& conversionInfo) {
    static_assert(kComponentAShift + kComponentBits <= 32);

    SkASSERT(conversionInfo.fYcbcrModel                  < (1u << kYcbcrModelBits        ));
    SkASSERT(conversionInfo.fYcbcrRange                  < (1u << kYcbcrRangeBits        ));
    SkASSERT(conversionInfo.fXChromaOffset               < (1u << kXChromaOffsetBits     ));
    SkASSERT(conversionInfo.fYChromaOffset               < (1u << kYChromaOffsetBits     ));
    SkASSERT(conversionInfo.fChromaFilter                < (1u << kChromaFilterBits      ));
    SkASSERT(conversionInfo.fForceExplicitReconstruction < (1u << kForceExplicitReconBits));
    SkASSERT(conversionInfo.fComponents.r                < (1u << kComponentBits         ));
    SkASSERT(conversionInfo.fComponents.g                < (1u << kComponentBits         ));
    SkASSERT(conversionInfo.fComponents.b                < (1u << kComponentBits         ));
    SkASSERT(conversionInfo.fComponents.a                < (1u << kComponentBits         ));

    bool usesExternalFormat = conversionInfo.fFormat == VK_FORMAT_UNDEFINED;

    return (((uint32_t)(usesExternalFormat                         ) << kUsesExternalFormatShift) |
            ((uint32_t)(conversionInfo.fYcbcrModel                 ) << kYcbcrModelShift        ) |
            ((uint32_t)(conversionInfo.fYcbcrRange                 ) << kYcbcrRangeShift        ) |
            ((uint32_t)(conversionInfo.fXChromaOffset              ) << kXChromaOffsetShift     ) |
            ((uint32_t)(conversionInfo.fYChromaOffset              ) << kYChromaOffsetShift     ) |
            ((uint32_t)(conversionInfo.fChromaFilter               ) << kChromaFilterShift      ) |
            ((uint32_t)(conversionInfo.fForceExplicitReconstruction) << kForceExplicitReconShift) |
            ((uint32_t)(conversionInfo.fComponents.r               ) << kComponentRShift        ) |
            ((uint32_t)(conversionInfo.fComponents.g               ) << kComponentGShift        ) |
            ((uint32_t)(conversionInfo.fComponents.b               ) << kComponentBShift        ) |
            ((uint32_t)(conversionInfo.fComponents.a               ) << kComponentAShift        ));
}

int numInt32sNeeded(const VulkanYcbcrConversionInfo& conversionInfo) {
    if (!conversionInfo.isValid()) {
        return 0;
    }
    return conversionInfo.fFormat == VK_FORMAT_UNDEFINED ? SamplerDesc::kInt32sNeededExternalFormat
                                                         : SamplerDesc::kInt32sNeededKnownFormat;
}
} // namespace ycbcrPackaging

} // namespace skgpu::graphite