/*
 * Copyright 2022 Google LLC
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "src/gpu/graphite/vk/VulkanResourceProvider.h"

#include "include/core/SkSpan.h"
#include "include/gpu/MutableTextureState.h"
#include "include/gpu/graphite/BackendTexture.h"
#include "include/gpu/graphite/vk/VulkanGraphiteTypes.h"
#include "include/gpu/vk/VulkanMutableTextureState.h"
#include "src/gpu/graphite/Buffer.h"
#include "src/gpu/graphite/ComputePipeline.h"
#include "src/gpu/graphite/GraphicsPipeline.h"
#include "src/gpu/graphite/RenderPassDesc.h"
#include "src/gpu/graphite/Sampler.h"
#include "src/gpu/graphite/Texture.h"
#include "src/gpu/graphite/TextureInfoPriv.h"
#include "src/gpu/graphite/vk/VulkanBuffer.h"
#include "src/gpu/graphite/vk/VulkanCommandBuffer.h"
#include "src/gpu/graphite/vk/VulkanDescriptorPool.h"
#include "src/gpu/graphite/vk/VulkanDescriptorSet.h"
#include "src/gpu/graphite/vk/VulkanFramebuffer.h"
#include "src/gpu/graphite/vk/VulkanGraphicsPipeline.h"
#include "src/gpu/graphite/vk/VulkanGraphiteUtils.h"
#include "src/gpu/graphite/vk/VulkanRenderPass.h"
#include "src/gpu/graphite/vk/VulkanSampler.h"
#include "src/gpu/graphite/vk/VulkanSharedContext.h"
#include "src/gpu/graphite/vk/VulkanTexture.h"
#include "src/gpu/graphite/vk/VulkanYcbcrConversion.h"
#include "src/gpu/vk/VulkanMemory.h"
#include "src/sksl/SkSLCompiler.h"

#ifdef SK_BUILD_FOR_ANDROID
#include "src/gpu/vk/VulkanUtilsPriv.h"
#include <android/hardware_buffer.h>
#endif

namespace skgpu::graphite {

constexpr int kMaxNumberOfCachedBufferDescSets = 1024;

namespace {
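// Builds a pipeline layout containing no descriptor set layouts and a single push constant
// range sized and staged for the intrinsic constants. This backs the provider's "mock" push
// constant pipeline layout.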
VkPipelineLayout create_mock_layout(const VulkanSharedContext* sharedContext) {
    SkASSERT(sharedContext);
    VkPushConstantRange pushConstantRange;
    pushConstantRange.offset = 0;
    pushConstantRange.size = VulkanResourceProvider::kIntrinsicConstantSize;
    pushConstantRange.stageFlags = VulkanResourceProvider::kIntrinsicConstantStageFlags;

    VkPipelineLayoutCreateInfo layoutCreateInfo;
    memset(&layoutCreateInfo, 0, sizeof(VkPipelineLayoutCreateInfo));
    layoutCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
    layoutCreateInfo.pNext = nullptr;
    layoutCreateInfo.flags = 0;
    layoutCreateInfo.setLayoutCount = 0;
    layoutCreateInfo.pSetLayouts = nullptr;
    layoutCreateInfo.pushConstantRangeCount = 1;
    layoutCreateInfo.pPushConstantRanges = &pushConstantRange;

    VkResult result;
    VkPipelineLayout layout;
    VULKAN_CALL_RESULT(sharedContext,
                       result,
                       CreatePipelineLayout(sharedContext->device(),
                                            &layoutCreateInfo,
                                            /*const VkAllocationCallbacks*=*/nullptr,
                                            &layout));
    return layout;
}
} // anonymous namespace

VulkanResourceProvider::VulkanResourceProvider(SharedContext* sharedContext,
                                               SingleOwner* singleOwner,
                                               uint32_t recorderID,
                                               size_t resourceBudget)
        : ResourceProvider(sharedContext, singleOwner, recorderID, resourceBudget)
        , fMockPushConstantPipelineLayout(
                  create_mock_layout(static_cast<const VulkanSharedContext*>(sharedContext)))
        , fUniformBufferDescSetCache(kMaxNumberOfCachedBufferDescSets) {}

VulkanResourceProvider::~VulkanResourceProvider() {
    if (fPipelineCache != VK_NULL_HANDLE) {
        VULKAN_CALL(this->vulkanSharedContext()->interface(),
                    DestroyPipelineCache(this->vulkanSharedContext()->device(),
                                         fPipelineCache,
                                         nullptr));
    }
    if (fMSAALoadVertShaderModule != VK_NULL_HANDLE) {
        VULKAN_CALL(this->vulkanSharedContext()->interface(),
                    DestroyShaderModule(this->vulkanSharedContext()->device(),
                                        fMSAALoadVertShaderModule,
                                        nullptr));
    }
    if (fMSAALoadFragShaderModule != VK_NULL_HANDLE) {
        VULKAN_CALL(this->vulkanSharedContext()->interface(),
                    DestroyShaderModule(this->vulkanSharedContext()->device(),
                                        fMSAALoadFragShaderModule,
                                        nullptr));
    }
    if (fMSAALoadPipelineLayout != VK_NULL_HANDLE) {
        VULKAN_CALL(this->vulkanSharedContext()->interface(),
                    DestroyPipelineLayout(this->vulkanSharedContext()->device(),
                                          fMSAALoadPipelineLayout,
                                          nullptr));
    }
    if (fMockPushConstantPipelineLayout) {
        VULKAN_CALL(this->vulkanSharedContext()->interface(),
                    DestroyPipelineLayout(this->vulkanSharedContext()->device(),
                                          fMockPushConstantPipelineLayout,
                                          nullptr));
    }
}

const VulkanSharedContext* VulkanResourceProvider::vulkanSharedContext() const {
    return static_cast<const VulkanSharedContext*>(fSharedContext);
}

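// Wraps a client-provided VkImage as a graphite Texture. Skia does not take ownership of the
// underlying memory. If the texture info carries valid YCbCr conversion information, a
// compatible conversion object must be found or created before wrapping.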
sk_sp<Texture> VulkanResourceProvider::onCreateWrappedTexture(const BackendTexture& texture) {
    sk_sp<VulkanYcbcrConversion> ycbcrConversion;
    const auto& vkInfo = TextureInfoPriv::Get<VulkanTextureInfo>(texture.info());
    if (vkInfo.fYcbcrConversionInfo.isValid()) {
        ycbcrConversion = this->findOrCreateCompatibleYcbcrConversion(vkInfo.fYcbcrConversionInfo);
        if (!ycbcrConversion) {
            return nullptr;
        }
    }

    return VulkanTexture::MakeWrapped(this->vulkanSharedContext(),
                                      texture.dimensions(),
                                      texture.info(),
                                      BackendTextures::GetMutableState(texture),
                                      BackendTextures::GetVkImage(texture),
                                      /*alloc=*/{} /*Skia does not own wrapped texture memory*/,
                                      std::move(ycbcrConversion));
}

sk_sp<GraphicsPipeline> VulkanResourceProvider::createGraphicsPipeline(
        const RuntimeEffectDictionary* runtimeDict,
        const UniqueKey& pipelineKey,
        const GraphicsPipelineDesc& pipelineDesc,
        const RenderPassDesc& renderPassDesc,
        SkEnumBitMask<PipelineCreationFlags> pipelineCreationFlags,
        uint32_t compilationID) {
    return VulkanGraphicsPipeline::Make(this,
                                        runtimeDict,
                                        pipelineKey,
                                        pipelineDesc,
                                        renderPassDesc,
                                        pipelineCreationFlags,
                                        compilationID);
}

sk_sp<ComputePipeline> VulkanResourceProvider::createComputePipeline(const ComputePipelineDesc&) {
    return nullptr;
}

sk_sp<Texture> VulkanResourceProvider::createTexture(SkISize size,
                                                     const TextureInfo& info) {
    sk_sp<VulkanYcbcrConversion> ycbcrConversion;
    const auto& vkInfo = TextureInfoPriv::Get<VulkanTextureInfo>(info);
    if (vkInfo.fYcbcrConversionInfo.isValid()) {
        ycbcrConversion = this->findOrCreateCompatibleYcbcrConversion(vkInfo.fYcbcrConversionInfo);
        if (!ycbcrConversion) {
            return nullptr;
        }
    }

    return VulkanTexture::Make(this->vulkanSharedContext(),
                               size,
                               info,
                               std::move(ycbcrConversion));
}

sk_sp<Buffer> VulkanResourceProvider::createBuffer(size_t size,
                                                   BufferType type,
                                                   AccessPattern accessPattern) {
    return VulkanBuffer::Make(this->vulkanSharedContext(), size, type, accessPattern);
}

sk_sp<Sampler> VulkanResourceProvider::createSampler(const SamplerDesc& samplerDesc) {
    sk_sp<VulkanYcbcrConversion> ycbcrConversion = nullptr;

    // An immutable sampler implies that a ycbcr conversion is in use, so set one up if needed.
    const bool usesYcbcrConversion = samplerDesc.isImmutable();
    if (usesYcbcrConversion) {
        VulkanYcbcrConversionInfo ycbcrInfo = VulkanYcbcrConversion::FromImmutableSamplerInfo(
                samplerDesc.immutableSamplerInfo());
        ycbcrConversion = this->findOrCreateCompatibleYcbcrConversion(ycbcrInfo);
        if (!ycbcrConversion) {
            return nullptr;
        }
    }

    return VulkanSampler::Make(this->vulkanSharedContext(),
                               samplerDesc,
                               std::move(ycbcrConversion));
}

BackendTexture VulkanResourceProvider::onCreateBackendTexture(SkISize dimensions,
                                                              const TextureInfo& info) {
    const auto& vkTexInfo = TextureInfoPriv::Get<VulkanTextureInfo>(info);
    VulkanTexture::CreatedImageInfo createdTextureInfo;
    if (!VulkanTexture::MakeVkImage(this->vulkanSharedContext(), dimensions, info,
                                    &createdTextureInfo)) {
        return {};
    }
    return BackendTextures::MakeVulkan(
            dimensions,
            vkTexInfo,
            skgpu::MutableTextureStates::GetVkImageLayout(createdTextureInfo.fMutableState.get()),
            skgpu::MutableTextureStates::GetVkQueueFamilyIndex(
                    createdTextureInfo.fMutableState.get()),
            createdTextureInfo.fImage,
            createdTextureInfo.fMemoryAlloc);
}

namespace {
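// Assembles a cache key describing the structure of a descriptor set: a count of descriptors
// followed by one packed word per descriptor (type in the top byte, binding index shifted into
// bits 16-23, count in the low 16 bits), plus the sampler desc words for any immutable sampler.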
GraphiteResourceKey build_desc_set_key(const SkSpan<DescriptorData>& requestedDescriptors) {
    static const ResourceType kType = GraphiteResourceKey::GenerateResourceType();

    // The number of uint32s needed for a key can depend on whether we use immutable samplers or
    // not. So, accumulate the key data in a first pass to determine that quantity, then simply
    // copy it into the builder afterwards.
    skia_private::TArray<uint32_t> keyData(requestedDescriptors.size() + 1);

    keyData.push_back(requestedDescriptors.size());
    for (const DescriptorData& desc : requestedDescriptors) {
        keyData.push_back(static_cast<uint8_t>(desc.fType) << 24 |
                          desc.fBindingIndex << 16 |
                          static_cast<uint16_t>(desc.fCount));
        if (desc.fImmutableSampler) {
            const VulkanSampler* sampler =
                    static_cast<const VulkanSampler*>(desc.fImmutableSampler);
            SkASSERT(sampler);
            keyData.push_back_n(sampler->samplerDesc().asSpan().size(),
                                sampler->samplerDesc().asSpan().data());
        }
    }

    GraphiteResourceKey key;
    GraphiteResourceKey::Builder builder(&key, kType, keyData.size());

    for (int i = 0; i < keyData.size(); i++) {
        builder[i] = keyData[i];
    }

    builder.finish();
    return key;
}

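// Allocates a single descriptor set from the given pool and registers it with the resource
// cache under the provided key so subsequent requests with the same structure can reuse it.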
sk_sp<VulkanDescriptorSet> add_new_desc_set_to_cache(const VulkanSharedContext* context,
                                                     const sk_sp<VulkanDescriptorPool>& pool,
                                                     const GraphiteResourceKey& descSetKey,
                                                     ResourceCache* resourceCache) {
    sk_sp<VulkanDescriptorSet> descSet = VulkanDescriptorSet::Make(context, pool);
    if (!descSet) {
        return nullptr;
    }
    resourceCache->insertResource(descSet.get(), descSetKey, Budgeted::kYes, Shareable::kNo);

    return descSet;
}
} // anonymous namespace

sk_sp<VulkanDescriptorSet> VulkanResourceProvider::findOrCreateDescriptorSet(
        SkSpan<DescriptorData> requestedDescriptors) {
    if (requestedDescriptors.empty()) {
        return nullptr;
    }

    // Search for available descriptor sets by assembling a key based upon the set's structure.
    GraphiteResourceKey key = build_desc_set_key(requestedDescriptors);
    if (auto descSet = fResourceCache->findAndRefResource(
            key, skgpu::Budgeted::kYes, Shareable::kNo)) {
        // A non-null resource pointer indicates we have found an available descriptor set.
        return sk_sp<VulkanDescriptorSet>(static_cast<VulkanDescriptorSet*>(descSet));
    }

    // If we did not find an existing available desc set, allocate sets with the appropriate
    // layout and add them to the cache.
    VkDescriptorSetLayout layout;
    const VulkanSharedContext* context = this->vulkanSharedContext();
    DescriptorDataToVkDescSetLayout(context, requestedDescriptors, &layout);
    if (!layout) {
        return nullptr;
    }
    auto pool = VulkanDescriptorPool::Make(context, requestedDescriptors, layout);
    if (!pool) {
        VULKAN_CALL(context->interface(), DestroyDescriptorSetLayout(context->device(),
                                                                     layout,
                                                                     nullptr));
        return nullptr;
    }

    // Start with allocating one descriptor set. If one cannot be successfully created, then we can
    // return early before attempting to allocate more. Storing a ptr to the first set also
    // allows us to return that later without having to perform a find operation on the cache once
    // all the sets are added.
    auto firstDescSet =
            add_new_desc_set_to_cache(context, pool, key, fResourceCache.get());
    if (!firstDescSet) {
        return nullptr;
    }

    // Continue to allocate & cache the maximum number of sets so they can be easily accessed as
    // they're needed.
    for (int i = 1; i < VulkanDescriptorPool::kMaxNumSets; i++) {
        auto descSet =
                add_new_desc_set_to_cache(context, pool, key, fResourceCache.get());
        if (!descSet) {
            SKGPU_LOG_W("Descriptor set allocation %d of %d was unsuccessful; no more sets will "
                        "be allocated from this pool.", i, VulkanDescriptorPool::kMaxNumSets);
            break;
        }
    }

    return firstDescSet;
}

namespace {

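// Builds the LRU-cache key for a uniform-buffer bind group: two words per possible uniform
// buffer binding (the buffer's unique ID and its bound size), with unused slots left as zero.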
VulkanResourceProvider::UniformBindGroupKey make_ubo_bind_group_key(
        SkSpan<DescriptorData> requestedDescriptors,
        SkSpan<BindBufferInfo> bindUniformBufferInfo) {
    VulkanResourceProvider::UniformBindGroupKey uniqueKey;
    {
        // Each entry in the bind group needs 2 uint32_t in the key:
        // - buffer's unique ID: 32 bits.
        // - buffer's binding size: 32 bits.
        // We need a total of kNumUniformBuffers entries in the uniform buffer bind group.
        // Unused entries will be assigned zero values.
        VulkanResourceProvider::UniformBindGroupKey::Builder builder(&uniqueKey);

        for (uint32_t i = 0; i < VulkanGraphicsPipeline::kNumUniformBuffers; ++i) {
            builder[2 * i] = 0;
            builder[2 * i + 1] = 0;
        }

        for (uint32_t i = 0; i < requestedDescriptors.size(); ++i) {
            int descriptorBindingIndex = requestedDescriptors[i].fBindingIndex;
            SkASSERT(SkTo<unsigned long>(descriptorBindingIndex) < bindUniformBufferInfo.size());
            SkASSERT(SkTo<unsigned long>(descriptorBindingIndex) <
                     VulkanGraphicsPipeline::kNumUniformBuffers);
            const auto& bindInfo = bindUniformBufferInfo[descriptorBindingIndex];
            const VulkanBuffer* boundBuffer = static_cast<const VulkanBuffer*>(bindInfo.fBuffer);
            SkASSERT(boundBuffer);
            builder[2 * descriptorBindingIndex] = boundBuffer->uniqueID().asUInt();
            builder[2 * descriptorBindingIndex + 1] = bindInfo.fSize;
        }

        builder.finish();
    }

    return uniqueKey;
}

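// Writes each bound uniform buffer into the given descriptor set. The base offset is always 0
// because dynamic uniform buffers supply their offsets at bind time.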
void update_uniform_descriptor_set(SkSpan<DescriptorData> requestedDescriptors,
                                   SkSpan<BindBufferInfo> bindUniformBufferInfo,
                                   VkDescriptorSet descSet,
                                   const VulkanSharedContext* sharedContext) {
    for (size_t i = 0; i < requestedDescriptors.size(); i++) {
        int descriptorBindingIndex = requestedDescriptors[i].fBindingIndex;
        SkASSERT(SkTo<unsigned long>(descriptorBindingIndex) < bindUniformBufferInfo.size());
        const auto& bindInfo = bindUniformBufferInfo[descriptorBindingIndex];
        if (bindInfo.fBuffer) {
#if defined(SK_DEBUG)
            static uint64_t maxBufferRange =
                    sharedContext->caps()->storageBufferSupport()
                            ? sharedContext->vulkanCaps().maxStorageBufferRange()
                            : sharedContext->vulkanCaps().maxUniformBufferRange();
            SkASSERT(bindInfo.fSize <= maxBufferRange);
#endif
            VkDescriptorBufferInfo bufferInfo;
            memset(&bufferInfo, 0, sizeof(VkDescriptorBufferInfo));
            auto vulkanBuffer = static_cast<const VulkanBuffer*>(bindInfo.fBuffer);
            bufferInfo.buffer = vulkanBuffer->vkBuffer();
            bufferInfo.offset = 0; // We always use dynamic ubos so we set the base offset to 0
            bufferInfo.range = bindInfo.fSize;

            VkWriteDescriptorSet writeInfo;
            memset(&writeInfo, 0, sizeof(VkWriteDescriptorSet));
            writeInfo.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
            writeInfo.pNext = nullptr;
            writeInfo.dstSet = descSet;
            writeInfo.dstBinding = descriptorBindingIndex;
            writeInfo.dstArrayElement = 0;
            writeInfo.descriptorCount = requestedDescriptors[i].fCount;
            writeInfo.descriptorType = DsTypeEnumToVkDs(requestedDescriptors[i].fType);
            writeInfo.pImageInfo = nullptr;
            writeInfo.pBufferInfo = &bufferInfo;
            writeInfo.pTexelBufferView = nullptr;

            // TODO(b/293925059): Migrate to updating all the uniform descriptors with one driver
            // call. Calling UpdateDescriptorSets once to encapsulate updates to all uniform
            // descriptors would be ideal, but that led to issues with draws where all the UBOs
            // within that set would unexpectedly be assigned the same offset. Updating them one at
            // a time within this loop works in the meantime but is suboptimal.
            VULKAN_CALL(sharedContext->interface(),
                        UpdateDescriptorSets(sharedContext->device(),
                                             /*descriptorWriteCount=*/1,
                                             &writeInfo,
                                             /*descriptorCopyCount=*/0,
                                             /*pDescriptorCopies=*/nullptr));
        }
    }
}

} // anonymous namespace

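// Looks for a matching bind group in the recorder-local LRU cache; on a miss, obtains a
// descriptor set with the requested layout, writes the uniform buffer bindings into it, and
// caches the result.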
sk_sp<VulkanDescriptorSet> VulkanResourceProvider::findOrCreateUniformBuffersDescriptorSet(
        SkSpan<DescriptorData> requestedDescriptors,
        SkSpan<BindBufferInfo> bindUniformBufferInfo) {
    SkASSERT(requestedDescriptors.size() <= VulkanGraphicsPipeline::kNumUniformBuffers);

    auto key = make_ubo_bind_group_key(requestedDescriptors, bindUniformBufferInfo);
    auto* existingDescSet = fUniformBufferDescSetCache.find(key);
    if (existingDescSet) {
        return *existingDescSet;
    }
    sk_sp<VulkanDescriptorSet> newDS = this->findOrCreateDescriptorSet(requestedDescriptors);
    if (!newDS) {
        return nullptr;
    }

    update_uniform_descriptor_set(requestedDescriptors,
                                  bindUniformBufferInfo,
                                  *newDS->descriptorSet(),
                                  this->vulkanSharedContext());
    return *fUniformBufferDescSetCache.insert(key, newDS);
}

sk_sp<VulkanRenderPass> VulkanResourceProvider::findOrCreateRenderPassWithKnownKey(
        const RenderPassDesc& renderPassDesc,
        bool compatibleOnly,
        const GraphiteResourceKey& rpKey) {
    static constexpr Budgeted kBudgeted = Budgeted::kYes;
    static constexpr Shareable kShareable = Shareable::kYes;

    if (Resource* resource = fResourceCache->findAndRefResource(rpKey, kBudgeted, kShareable)) {
        return sk_sp<VulkanRenderPass>(static_cast<VulkanRenderPass*>(resource));
    }

    sk_sp<VulkanRenderPass> renderPass =
            VulkanRenderPass::MakeRenderPass(this->vulkanSharedContext(),
                                             renderPassDesc,
                                             compatibleOnly);
    if (!renderPass) {
        return nullptr;
    }

    fResourceCache->insertResource(renderPass.get(), rpKey, kBudgeted, kShareable);

    return renderPass;
}

sk_sp<VulkanRenderPass> VulkanResourceProvider::findOrCreateRenderPass(
        const RenderPassDesc& renderPassDesc, bool compatibleOnly) {
    GraphiteResourceKey rpKey = VulkanRenderPass::MakeRenderPassKey(renderPassDesc, compatibleOnly);

    return this->findOrCreateRenderPassWithKnownKey(renderPassDesc, compatibleOnly, rpKey);
}

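// Lazily creates the VkPipelineCache on first request. On creation failure the handle is reset
// to VK_NULL_HANDLE, so callers must tolerate a null cache.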
VkPipelineCache VulkanResourceProvider::pipelineCache() {
    if (fPipelineCache == VK_NULL_HANDLE) {
        VkPipelineCacheCreateInfo createInfo;
        memset(&createInfo, 0, sizeof(VkPipelineCacheCreateInfo));
        createInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO;
        createInfo.pNext = nullptr;
        createInfo.flags = 0;
        createInfo.initialDataSize = 0;
        createInfo.pInitialData = nullptr;
        VkResult result;
        VULKAN_CALL_RESULT(this->vulkanSharedContext(),
                           result,
                           CreatePipelineCache(this->vulkanSharedContext()->device(),
                                               &createInfo,
                                               nullptr,
                                               &fPipelineCache));
        if (VK_SUCCESS != result) {
            fPipelineCache = VK_NULL_HANDLE;
        }
    }
    return fPipelineCache;
}

namespace {

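// Collects attachment image views in the order the render pass expects them: color, then
// resolve (only when a color attachment is present), then depth/stencil.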
void gather_attachment_views(skia_private::TArray<VkImageView>& attachmentViews,
                             VulkanTexture* colorTexture,
                             VulkanTexture* resolveTexture,
                             VulkanTexture* depthStencilTexture) {
    if (colorTexture) {
        VkImageView& colorAttachmentView = attachmentViews.push_back();
        colorAttachmentView =
                colorTexture->getImageView(VulkanImageView::Usage::kAttachment)->imageView();

        if (resolveTexture) {
            VkImageView& resolveView = attachmentViews.push_back();
            resolveView =
                    resolveTexture->getImageView(VulkanImageView::Usage::kAttachment)->imageView();
        }
    }

    if (depthStencilTexture) {
        VkImageView& stencilView = attachmentViews.push_back();
        stencilView =
                depthStencilTexture->getImageView(VulkanImageView::Usage::kAttachment)->imageView();
    }
}

} // anonymous namespace

sk_sp<VulkanFramebuffer> VulkanResourceProvider::createFramebuffer(
        const VulkanSharedContext* context,
        VulkanTexture* colorTexture,
        VulkanTexture* resolveTexture,
        VulkanTexture* depthStencilTexture,
        const RenderPassDesc& renderPassDesc,
        const VulkanRenderPass& renderPass,
        const int width,
        const int height) {

    VulkanTexture* mainTexture = nullptr;
    if (colorTexture) {
        mainTexture = resolveTexture ? resolveTexture : colorTexture;
    } else {
        SkASSERT(depthStencilTexture);
        mainTexture = depthStencilTexture;
    }
    SkASSERT(mainTexture);
    VulkanTexture* msaaTexture = resolveTexture ? colorTexture : nullptr;

    sk_sp<VulkanFramebuffer> fb = mainTexture->getCachedFramebuffer(renderPassDesc,
                                                                    msaaTexture,
                                                                    depthStencilTexture);
    if (fb) {
        return fb;
    }

    // Gather attachment views needed for framebuffer creation.
    skia_private::TArray<VkImageView> attachmentViews;
    gather_attachment_views(attachmentViews, colorTexture, resolveTexture, depthStencilTexture);

    // TODO: Consider caching these in the future. If we pursue that, it may make more sense to
    // use a compatible renderpass rather than a full one to make each framebuffer more versatile.
    VkFramebufferCreateInfo framebufferInfo;
    memset(&framebufferInfo, 0, sizeof(VkFramebufferCreateInfo));
    framebufferInfo.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO;
    framebufferInfo.pNext = nullptr;
    framebufferInfo.flags = 0;
    framebufferInfo.renderPass = renderPass.renderPass();
    framebufferInfo.attachmentCount = attachmentViews.size();
    framebufferInfo.pAttachments = attachmentViews.begin();
    framebufferInfo.width = width;
    framebufferInfo.height = height;
    framebufferInfo.layers = 1;
    fb = VulkanFramebuffer::Make(context,
                                 framebufferInfo,
                                 renderPassDesc,
                                 sk_ref_sp(msaaTexture),
                                 sk_ref_sp(depthStencilTexture));
    if (fb) {
        mainTexture->addCachedFramebuffer(fb);
    }
    return fb;
}

void VulkanResourceProvider::onDeleteBackendTexture(const BackendTexture& texture) {
    SkASSERT(texture.isValid());
    SkASSERT(texture.backend() == BackendApi::kVulkan);

    VULKAN_CALL(this->vulkanSharedContext()->interface(),
                DestroyImage(this->vulkanSharedContext()->device(),
                             BackendTextures::GetVkImage(texture),
                             /*VkAllocationCallbacks=*/nullptr));

    VulkanAlloc alloc = BackendTextures::GetMemoryAlloc(texture);
    // Free the image memory used for the BackendTexture's VkImage.
    //
    // How we do this depends upon how the image was allocated (via the memory allocator or with
    // a direct call to the Vulkan driver). If the VulkanAlloc's fBackendMemory is != 0, then
    // that means the allocator was used. Otherwise, a direct driver call was used and we should
    // free the VkDeviceMemory (fMemory).
    if (alloc.fBackendMemory) {
        skgpu::VulkanMemory::FreeImageMemory(this->vulkanSharedContext()->memoryAllocator(), alloc);
    } else {
        SkASSERT(alloc.fMemory != VK_NULL_HANDLE);
        VULKAN_CALL(this->vulkanSharedContext()->interface(),
                    FreeMemory(this->vulkanSharedContext()->device(), alloc.fMemory, nullptr));
    }
}

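// YCbCr conversions are shareable across recorders, so they are cached under a key built from
// the packed conversion info: one word of non-format info plus the 64-bit packed format split
// across two words.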
sk_sp<VulkanYcbcrConversion> VulkanResourceProvider::findOrCreateCompatibleYcbcrConversion(
        const VulkanYcbcrConversionInfo& ycbcrInfo) const {
    static constexpr Budgeted kBudgeted = Budgeted::kYes;
    static constexpr Shareable kShareable = Shareable::kYes;
    if (!ycbcrInfo.isValid()) {
        return nullptr;
    }

    GraphiteResourceKey key;
    {
        static const ResourceType kType = GraphiteResourceKey::GenerateResourceType();
        static constexpr int kKeySize = 3;

        GraphiteResourceKey::Builder builder(&key, kType, kKeySize);
        ImmutableSamplerInfo packedInfo = VulkanYcbcrConversion::ToImmutableSamplerInfo(ycbcrInfo);
        builder[0] = packedInfo.fNonFormatYcbcrConversionInfo;
        builder[1] = (uint32_t) packedInfo.fFormat;
        builder[2] = (uint32_t) (packedInfo.fFormat >> 32);
    }

    if (Resource* resource = fResourceCache->findAndRefResource(key, kBudgeted, kShareable)) {
        return sk_sp(static_cast<VulkanYcbcrConversion*>(resource));
    }

    auto ycbcrConversion = VulkanYcbcrConversion::Make(this->vulkanSharedContext(), ycbcrInfo);
    if (!ycbcrConversion) {
        return nullptr;
    }

    fResourceCache->insertResource(ycbcrConversion.get(), key, kBudgeted, kShareable);
    return ycbcrConversion;
}

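// Load-MSAA pipelines are cached per compatible render pass key, so one pipeline can be reused
// by any render pass that shares the same attachment structure.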
sk_sp<VulkanGraphicsPipeline> VulkanResourceProvider::findOrCreateLoadMSAAPipeline(
        const RenderPassDesc& renderPassDesc) {

    if (!renderPassDesc.fColorResolveAttachment.fTextureInfo.isValid() ||
        !renderPassDesc.fColorAttachment.fTextureInfo.isValid()) {
        SKGPU_LOG_E("Loading MSAA from resolve texture requires valid color & resolve attachment");
        return nullptr;
    }

    // Check to see if we already have a suitable pipeline that we can use.
    GraphiteResourceKey renderPassKey =
            VulkanRenderPass::MakeRenderPassKey(renderPassDesc, /*compatibleOnly=*/true);
    for (int i = 0; i < fLoadMSAAPipelines.size(); i++) {
        if (renderPassKey == fLoadMSAAPipelines.at(i).first) {
            return fLoadMSAAPipelines.at(i).second;
        }
    }

    // If any of the load MSAA pipeline creation structures are null then we need to initialize
    // those before proceeding. If the creation of one of them fails, all are assigned to null, so
    // we only need to check one of the structures.
    if (fMSAALoadVertShaderModule == VK_NULL_HANDLE) {
        SkASSERT(fMSAALoadFragShaderModule == VK_NULL_HANDLE &&
                 fMSAALoadPipelineLayout == VK_NULL_HANDLE);
        if (!VulkanGraphicsPipeline::InitializeMSAALoadPipelineStructs(
                    this->vulkanSharedContext(),
                    &fMSAALoadVertShaderModule,
                    &fMSAALoadFragShaderModule,
                    &fMSAALoadShaderStageInfo[0],
                    &fMSAALoadPipelineLayout)) {
            SKGPU_LOG_E("Failed to initialize MSAA load pipeline creation structure(s)");
            return nullptr;
        }
    }

    sk_sp<VulkanRenderPass> compatibleRenderPass =
            this->findOrCreateRenderPassWithKnownKey(renderPassDesc,
                                                     /*compatibleOnly=*/true,
                                                     renderPassKey);
    if (!compatibleRenderPass) {
        SKGPU_LOG_E("Failed to make compatible render pass for loading MSAA");
        return nullptr;
    }

    sk_sp<VulkanGraphicsPipeline> pipeline = VulkanGraphicsPipeline::MakeLoadMSAAPipeline(
            this->vulkanSharedContext(),
            fMSAALoadVertShaderModule,
            fMSAALoadFragShaderModule,
            &fMSAALoadShaderStageInfo[0],
            fMSAALoadPipelineLayout,
            compatibleRenderPass,
            this->pipelineCache(),
            renderPassDesc.fColorAttachment.fTextureInfo);

    if (!pipeline) {
        SKGPU_LOG_E("Failed to create MSAA load pipeline");
        return nullptr;
    }

    fLoadMSAAPipelines.push_back(std::make_pair(renderPassKey, pipeline));
    return pipeline;
}

#ifdef SK_BUILD_FOR_ANDROID

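// Imports an AHardwareBuffer as a VkImage-backed BackendTexture. Buffers whose format Vulkan
// understands are imported with that VkFormat when its format features suffice; otherwise the
// buffer is imported as an external format, which restricts usage to sampling.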
BackendTexture VulkanResourceProvider::onCreateBackendTexture(AHardwareBuffer* hardwareBuffer,
                                                              bool isRenderable,
                                                              bool isProtectedContent,
                                                              SkISize dimensions,
                                                              bool fromAndroidWindow) const {

    const VulkanSharedContext* vkContext = this->vulkanSharedContext();
    VkDevice device = vkContext->device();
    const VulkanCaps& vkCaps = vkContext->vulkanCaps();

    VkAndroidHardwareBufferFormatPropertiesANDROID hwbFormatProps;
    VkAndroidHardwareBufferPropertiesANDROID hwbProps;
    if (!skgpu::GetAHardwareBufferProperties(
                &hwbFormatProps, &hwbProps, vkContext->interface(), hardwareBuffer, device)) {
        return {};
    }

    bool importAsExternalFormat = hwbFormatProps.format == VK_FORMAT_UNDEFINED;

    // Start to assemble the VulkanTextureInfo, which is needed later on to create the VkImage
    // but is also used sooner to query VulkanCaps for certain format feature support.
    // TODO: Allow the client to pass in a tiling mode. For external formats, tiling is required
    // to be optimal. For AHBs that have a known Vulkan format, we can query VulkanCaps to
    // determine whether optimal is a valid choice given the format features.
    VkImageTiling tiling = VK_IMAGE_TILING_OPTIMAL;
    VkImageCreateFlags imgCreateflags = isProtectedContent ? VK_IMAGE_CREATE_PROTECTED_BIT : 0;
    VkImageUsageFlags usageFlags = VK_IMAGE_USAGE_SAMPLED_BIT;
    // When importing as an external format the image usage can only be VK_IMAGE_USAGE_SAMPLED_BIT.
    if (!importAsExternalFormat) {
        usageFlags |= VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
        if (isRenderable) {
            // Renderable attachments can be used as input attachments if we are loading from MSAA.
            usageFlags |= VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT;
        }
    }
    VulkanTextureInfo vkTexInfo { VK_SAMPLE_COUNT_1_BIT,
                                  Mipmapped::kNo,
                                  imgCreateflags,
                                  hwbFormatProps.format,
                                  tiling,
                                  usageFlags,
                                  VK_SHARING_MODE_EXCLUSIVE,
                                  VK_IMAGE_ASPECT_COLOR_BIT,
                                  VulkanYcbcrConversionInfo() };

    if (isRenderable && (importAsExternalFormat || !vkCaps.isRenderable(vkTexInfo))) {
        SKGPU_LOG_W("Renderable texture requested from an AHardwareBuffer which uses a VkFormat "
                    "that Skia cannot render to (VkFormat: %d).\n", hwbFormatProps.format);
        return {};
    }

    if (!importAsExternalFormat && (!vkCaps.isTransferSrc(vkTexInfo) ||
                                    !vkCaps.isTransferDst(vkTexInfo) ||
                                    !vkCaps.isTexturable(vkTexInfo))) {
        if (isRenderable) {
            SKGPU_LOG_W("VkFormat %d is either unfamiliar to Skia or doesn't support the necessary"
                        " format features. Because a renderable texture was requested, we cannot "
                        "fall back to importing with an external format.\n", hwbFormatProps.format);
            return {};
        }
        // If the VkFormat does not support the features we need, then import as an external
        // format.
        importAsExternalFormat = true;
        // If we use VkExternalFormatANDROID with an externalFormat != 0, then the format must be
        // VK_FORMAT_UNDEFINED.
        vkTexInfo.fFormat = VK_FORMAT_UNDEFINED;
        vkTexInfo.fImageUsageFlags = VK_IMAGE_USAGE_SAMPLED_BIT;
    }

    VulkanYcbcrConversionInfo ycbcrInfo;
    VkExternalFormatANDROID externalFormat;
    externalFormat.sType = VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID;
    externalFormat.pNext = nullptr;
    externalFormat.externalFormat = 0; // If this is zero it is as if we aren't using this struct.
    if (importAsExternalFormat) {
        GetYcbcrConversionInfoFromFormatProps(&ycbcrInfo, hwbFormatProps);
        if (!ycbcrInfo.isValid()) {
            SKGPU_LOG_W("Failed to create valid YCbCr conversion information from hardware buffer "
                        "format properties.\n");
            return {};
        }
        vkTexInfo.fYcbcrConversionInfo = ycbcrInfo;
        externalFormat.externalFormat = hwbFormatProps.externalFormat;
    }
    const VkExternalMemoryImageCreateInfo externalMemoryImageInfo{
            VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO,                 // sType
            &externalFormat,                                                     // pNext
            VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID,  // handleTypes
    };

    SkASSERT(!(vkTexInfo.fFlags & VK_IMAGE_CREATE_PROTECTED_BIT) ||
             fSharedContext->isProtected() == Protected::kYes);

    const VkImageCreateInfo imageCreateInfo = {
            VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,                              // sType
            &externalMemoryImageInfo,                                         // pNext
            vkTexInfo.fFlags,                                                 // VkImageCreateFlags
            VK_IMAGE_TYPE_2D,                                                 // VkImageType
            vkTexInfo.fFormat,                                                // VkFormat
            { (uint32_t)dimensions.fWidth, (uint32_t)dimensions.fHeight, 1 }, // VkExtent3D
            1,                                                                // mipLevels
            1,                                                                // arrayLayers
            VK_SAMPLE_COUNT_1_BIT,                                            // samples
            vkTexInfo.fImageTiling,                                           // VkImageTiling
            vkTexInfo.fImageUsageFlags,                                       // VkImageUsageFlags
            vkTexInfo.fSharingMode,                                           // VkSharingMode
            0,                                                                // queueFamilyCount
            nullptr,                                                          // pQueueFamilyIndices
            VK_IMAGE_LAYOUT_UNDEFINED,                                        // initialLayout
    };

    VkResult result;
    VkImage image;
    result = VULKAN_CALL(vkContext->interface(),
                         CreateImage(device, &imageCreateInfo, nullptr, &image));
    if (result != VK_SUCCESS) {
        return {};
    }

    const VkPhysicalDeviceMemoryProperties2& phyDevMemProps =
            vkContext->vulkanCaps().physicalDeviceMemoryProperties2();
    VulkanAlloc alloc;
    if (!AllocateAndBindImageMemory(&alloc, image, phyDevMemProps, hwbProps, hardwareBuffer,
                                    vkContext->interface(), device)) {
        VULKAN_CALL(vkContext->interface(), DestroyImage(device, image, nullptr));
        return {};
    }

    return BackendTextures::MakeVulkan(dimensions,
                                       vkTexInfo,
                                       VK_IMAGE_LAYOUT_UNDEFINED,
                                       VK_QUEUE_FAMILY_FOREIGN_EXT,
                                       image,
                                       alloc);
}

#endif // SK_BUILD_FOR_ANDROID

} // namespace skgpu::graphite