/*
 * Copyright 2022 Google LLC
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "src/gpu/graphite/vk/VulkanResourceProvider.h"

#include "include/core/SkSpan.h"
#include "include/gpu/MutableTextureState.h"
#include "include/gpu/graphite/BackendTexture.h"
#include "include/gpu/graphite/vk/VulkanGraphiteTypes.h"
#include "include/gpu/vk/VulkanMutableTextureState.h"
#include "src/gpu/graphite/Buffer.h"
#include "src/gpu/graphite/ComputePipeline.h"
#include "src/gpu/graphite/GraphicsPipeline.h"
#include "src/gpu/graphite/RenderPassDesc.h"
#include "src/gpu/graphite/Sampler.h"
#include "src/gpu/graphite/Texture.h"
#include "src/gpu/graphite/vk/VulkanBuffer.h"
#include "src/gpu/graphite/vk/VulkanCommandBuffer.h"
#include "src/gpu/graphite/vk/VulkanDescriptorPool.h"
#include "src/gpu/graphite/vk/VulkanDescriptorSet.h"
#include "src/gpu/graphite/vk/VulkanFramebuffer.h"
#include "src/gpu/graphite/vk/VulkanGraphicsPipeline.h"
#include "src/gpu/graphite/vk/VulkanGraphiteTypesPriv.h"
#include "src/gpu/graphite/vk/VulkanRenderPass.h"
#include "src/gpu/graphite/vk/VulkanSampler.h"
#include "src/gpu/graphite/vk/VulkanSharedContext.h"
#include "src/gpu/graphite/vk/VulkanTexture.h"
#include "src/gpu/graphite/vk/VulkanYcbcrConversion.h"
#include "src/gpu/vk/VulkanMemory.h"
#include "src/sksl/SkSLCompiler.h"

#ifdef SK_BUILD_FOR_ANDROID
#include "src/gpu/vk/VulkanUtilsPriv.h"
#include <android/hardware_buffer.h>
#endif

namespace skgpu::graphite {

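// Cap on the number of uniform-buffer descriptor sets retained in fUniformBufferDescSetCache.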
constexpr int kMaxNumberOfCachedBufferDescSets = 1024;

namespace {
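// Creates a pipeline layout with no descriptor set layouts and a single push constant range
// covering the intrinsic constants (kIntrinsicConstantSize / kIntrinsicConstantStageFlags).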
VkPipelineLayout create_mock_layout(const VulkanSharedContext* sharedContext) {
    SkASSERT(sharedContext);
    VkPushConstantRange pushConstantRange;
    pushConstantRange.offset = 0;
    pushConstantRange.size = VulkanResourceProvider::kIntrinsicConstantSize;
    pushConstantRange.stageFlags = VulkanResourceProvider::kIntrinsicConstantStageFlags;

    VkPipelineLayoutCreateInfo layoutCreateInfo;
    memset(&layoutCreateInfo, 0, sizeof(VkPipelineLayoutCreateInfo));
    layoutCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
    layoutCreateInfo.pNext = nullptr;
    layoutCreateInfo.flags = 0;
    layoutCreateInfo.setLayoutCount = 0;
    layoutCreateInfo.pSetLayouts = nullptr;
    layoutCreateInfo.pushConstantRangeCount = 1;
    layoutCreateInfo.pPushConstantRanges = &pushConstantRange;

    VkResult result;
    VkPipelineLayout layout;
    VULKAN_CALL_RESULT(sharedContext,
                       result,
                       CreatePipelineLayout(sharedContext->device(),
                                            &layoutCreateInfo,
                                            /*const VkAllocationCallbacks*=*/nullptr,
                                            &layout));
    return layout;
}
} // anonymous namespace

VulkanResourceProvider::VulkanResourceProvider(SharedContext* sharedContext,
                                               SingleOwner* singleOwner,
                                               uint32_t recorderID,
                                               size_t resourceBudget)
        : ResourceProvider(sharedContext, singleOwner, recorderID, resourceBudget)
        , fMockPushConstantPipelineLayout(
                  create_mock_layout(static_cast<const VulkanSharedContext*>(sharedContext)))
        , fUniformBufferDescSetCache(kMaxNumberOfCachedBufferDescSets) {}

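// Destroy the Vulkan objects this provider creates lazily (pipeline cache, MSAA-load shader
// modules and pipeline layout) along with the push-constant-only layout made in the constructor.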
VulkanResourceProvider::~VulkanResourceProvider() {
    if (fPipelineCache != VK_NULL_HANDLE) {
        VULKAN_CALL(this->vulkanSharedContext()->interface(),
                    DestroyPipelineCache(this->vulkanSharedContext()->device(),
                                         fPipelineCache,
                                         nullptr));
    }
    if (fMSAALoadVertShaderModule != VK_NULL_HANDLE) {
        VULKAN_CALL(this->vulkanSharedContext()->interface(),
                    DestroyShaderModule(this->vulkanSharedContext()->device(),
                                        fMSAALoadVertShaderModule,
                                        nullptr));
    }
    if (fMSAALoadFragShaderModule != VK_NULL_HANDLE) {
        VULKAN_CALL(this->vulkanSharedContext()->interface(),
                    DestroyShaderModule(this->vulkanSharedContext()->device(),
                                        fMSAALoadFragShaderModule,
                                        nullptr));
    }
    if (fMSAALoadPipelineLayout != VK_NULL_HANDLE) {
        VULKAN_CALL(this->vulkanSharedContext()->interface(),
                    DestroyPipelineLayout(this->vulkanSharedContext()->device(),
                                          fMSAALoadPipelineLayout,
                                          nullptr));
    }
    if (fMockPushConstantPipelineLayout) {
        VULKAN_CALL(this->vulkanSharedContext()->interface(),
                    DestroyPipelineLayout(this->vulkanSharedContext()->device(),
                                          fMockPushConstantPipelineLayout,
                                          nullptr));
    }
}

const VulkanSharedContext* VulkanResourceProvider::vulkanSharedContext() const {
    return static_cast<const VulkanSharedContext*>(fSharedContext);
}

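// Wrapped textures adopt an externally owned VkImage, so Skia does not take ownership of the
// image's memory; however, a compatible ycbcr conversion object is still required when the
// texture's info specifies one.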
sk_sp<Texture> VulkanResourceProvider::onCreateWrappedTexture(const BackendTexture& texture) {
    sk_sp<VulkanYcbcrConversion> ycbcrConversion;
    if (TextureInfos::GetVulkanYcbcrConversionInfo(texture.info()).isValid()) {
        ycbcrConversion = this->findOrCreateCompatibleYcbcrConversion(
                TextureInfos::GetVulkanYcbcrConversionInfo(texture.info()));
        if (!ycbcrConversion) {
            return nullptr;
        }
    }

    return VulkanTexture::MakeWrapped(this->vulkanSharedContext(),
                                      texture.dimensions(),
                                      texture.info(),
                                      BackendTextures::GetMutableState(texture),
                                      BackendTextures::GetVkImage(texture),
                                      /*alloc=*/{} /*Skia does not own wrapped texture memory*/,
                                      std::move(ycbcrConversion));
}

sk_sp<GraphicsPipeline> VulkanResourceProvider::createGraphicsPipeline(
        const RuntimeEffectDictionary* runtimeDict,
        const UniqueKey& pipelineKey,
        const GraphicsPipelineDesc& pipelineDesc,
        const RenderPassDesc& renderPassDesc,
        SkEnumBitMask<PipelineCreationFlags> pipelineCreationFlags,
        uint32_t compilationID) {
    return VulkanGraphicsPipeline::Make(this,
                                        runtimeDict,
                                        pipelineKey,
                                        pipelineDesc,
                                        renderPassDesc,
                                        pipelineCreationFlags,
                                        compilationID);
}

sk_sp<ComputePipeline> VulkanResourceProvider::createComputePipeline(const ComputePipelineDesc&) {
    return nullptr;
}

sk_sp<Texture> VulkanResourceProvider::createTexture(SkISize size,
                                                     const TextureInfo& info,
                                                     skgpu::Budgeted budgeted) {
    sk_sp<VulkanYcbcrConversion> ycbcrConversion;
    if (TextureInfos::GetVulkanYcbcrConversionInfo(info).isValid()) {
        ycbcrConversion = this->findOrCreateCompatibleYcbcrConversion(
                TextureInfos::GetVulkanYcbcrConversionInfo(info));
        if (!ycbcrConversion) {
            return nullptr;
        }
    }

    return VulkanTexture::Make(this->vulkanSharedContext(),
                               size,
                               info,
                               budgeted,
                               std::move(ycbcrConversion));
}

sk_sp<Buffer> VulkanResourceProvider::createBuffer(size_t size,
                                                   BufferType type,
                                                   AccessPattern accessPattern) {
    return VulkanBuffer::Make(this->vulkanSharedContext(), size, type, accessPattern);
}

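// Samplers that require a ycbcr conversion share the conversion object through the resource
// cache: look it up with a key derived from the SamplerDesc and, on a miss, create and cache a
// new conversion before building the sampler itself.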
sk_sp<Sampler> VulkanResourceProvider::createSampler(const SamplerDesc& samplerDesc) {
    sk_sp<VulkanYcbcrConversion> ycbcrConversion = nullptr;

    // Non-zero conversion information means the sampler utilizes a ycbcr conversion.
    bool usesYcbcrConversion = (samplerDesc.desc() >> SamplerDesc::kImmutableSamplerInfoShift) != 0;
    if (usesYcbcrConversion) {
        GraphiteResourceKey ycbcrKey = VulkanYcbcrConversion::GetKeyFromSamplerDesc(samplerDesc);
        if (Resource* resource = fResourceCache->findAndRefResource(ycbcrKey,
                                                                    skgpu::Budgeted::kYes)) {
            ycbcrConversion =
                    sk_sp<VulkanYcbcrConversion>(static_cast<VulkanYcbcrConversion*>(resource));
        } else {
            ycbcrConversion = VulkanYcbcrConversion::Make(
                    this->vulkanSharedContext(),
                    static_cast<uint32_t>(
                            samplerDesc.desc() >> SamplerDesc::kImmutableSamplerInfoShift),
                    (uint64_t)(samplerDesc.externalFormatMSBs()) << 32 | samplerDesc.format());
            SkASSERT(ycbcrConversion);

            ycbcrConversion->setKey(ycbcrKey);
            fResourceCache->insertResource(ycbcrConversion.get());
        }
    }

    return VulkanSampler::Make(this->vulkanSharedContext(),
                               samplerDesc,
                               std::move(ycbcrConversion));
}

BackendTexture VulkanResourceProvider::onCreateBackendTexture(SkISize dimensions,
                                                              const TextureInfo& info) {
    VulkanTextureInfo vkTexInfo;
    if (!TextureInfos::GetVulkanTextureInfo(info, &vkTexInfo)) {
        return {};
    }
    VulkanTexture::CreatedImageInfo createdTextureInfo;
    if (!VulkanTexture::MakeVkImage(this->vulkanSharedContext(), dimensions, info,
                                    &createdTextureInfo)) {
        return {};
    }
    return BackendTextures::MakeVulkan(
            dimensions,
            vkTexInfo,
            skgpu::MutableTextureStates::GetVkImageLayout(createdTextureInfo.fMutableState.get()),
            skgpu::MutableTextureStates::GetVkQueueFamilyIndex(
                    createdTextureInfo.fMutableState.get()),
            createdTextureInfo.fImage,
            createdTextureInfo.fMemoryAlloc);
}

namespace {
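// Assembles a cache key capturing a descriptor set's structure. The first word is the descriptor
// count; each descriptor then contributes one packed word (type in bits 24-31, binding index in
// bits 16-23, count in bits 0-15) plus, for immutable samplers, the words of the sampler's
// SamplerDesc.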
GraphiteResourceKey build_desc_set_key(const SkSpan<DescriptorData>& requestedDescriptors) {
    static const ResourceType kType = GraphiteResourceKey::GenerateResourceType();

    // The number of int32s needed for a key can depend on whether we use immutable samplers or
    // not. So, accumulate the key data in one pass to determine that quantity and simply copy it
    // into the builder afterwards.
    skia_private::TArray<uint32_t> keyData(requestedDescriptors.size() + 1);

    keyData.push_back(requestedDescriptors.size());
    for (const DescriptorData& desc : requestedDescriptors) {
        keyData.push_back(static_cast<uint8_t>(desc.fType) << 24 |
                          desc.fBindingIndex << 16 |
                          static_cast<uint16_t>(desc.fCount));
        if (desc.fImmutableSampler) {
            const VulkanSampler* sampler =
                    static_cast<const VulkanSampler*>(desc.fImmutableSampler);
            SkASSERT(sampler);
            keyData.push_back_n(sampler->samplerDesc().asSpan().size(),
                                sampler->samplerDesc().asSpan().data());
        }
    }

    GraphiteResourceKey key;
    GraphiteResourceKey::Builder builder(&key, kType, keyData.size(), Shareable::kNo);

    for (int i = 0; i < keyData.size(); i++) {
        builder[i] = keyData[i];
    }

    builder.finish();
    return key;
}

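// Allocates a descriptor set from the given pool, assigns it the provided key, and registers it
// with the resource cache. Returns null if the pool cannot allocate another set.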
sk_sp<VulkanDescriptorSet> add_new_desc_set_to_cache(const VulkanSharedContext* context,
                                                     const sk_sp<VulkanDescriptorPool>& pool,
                                                     const GraphiteResourceKey& descSetKey,
                                                     ResourceCache* resourceCache) {
    sk_sp<VulkanDescriptorSet> descSet = VulkanDescriptorSet::Make(context, pool);
    if (!descSet) {
        return nullptr;
    }
    descSet->setKey(descSetKey);
    resourceCache->insertResource(descSet.get());

    return descSet;
}
} // anonymous namespace

sk_sp<VulkanDescriptorSet> VulkanResourceProvider::findOrCreateDescriptorSet(
        SkSpan<DescriptorData> requestedDescriptors) {
    if (requestedDescriptors.empty()) {
        return nullptr;
    }

    // Search for available descriptor sets by assembling a key based upon the set's structure.
    GraphiteResourceKey key = build_desc_set_key(requestedDescriptors);
    if (auto descSet = fResourceCache->findAndRefResource(key, skgpu::Budgeted::kYes)) {
        // A non-null resource pointer indicates we have found an available descriptor set.
        return sk_sp<VulkanDescriptorSet>(static_cast<VulkanDescriptorSet*>(descSet));
    }

    // If we did not find an existing available desc set, allocate sets with the appropriate
    // layout and add them to the cache.
    VkDescriptorSetLayout layout;
    const VulkanSharedContext* context = this->vulkanSharedContext();
    DescriptorDataToVkDescSetLayout(context, requestedDescriptors, &layout);
    if (!layout) {
        return nullptr;
    }
    auto pool = VulkanDescriptorPool::Make(context, requestedDescriptors, layout);
    if (!pool) {
        VULKAN_CALL(context->interface(), DestroyDescriptorSetLayout(context->device(),
                                                                     layout,
                                                                     nullptr));
        return nullptr;
    }

    // Start with allocating one descriptor set. If one cannot be successfully created, then we
    // can return early before attempting to allocate more. Storing a ptr to the first set also
    // allows us to return that later without having to perform a find operation on the cache once
    // all the sets are added.
    auto firstDescSet =
            add_new_desc_set_to_cache(context, pool, key, fResourceCache.get());
    if (!firstDescSet) {
        return nullptr;
    }

    // Continue to allocate & cache the maximum number of sets so they can be easily accessed as
    // they're needed.
    for (int i = 1; i < VulkanDescriptorPool::kMaxNumSets; i++) {
        auto descSet =
                add_new_desc_set_to_cache(context, pool, key, fResourceCache.get());
        if (!descSet) {
            SKGPU_LOG_W("Descriptor set allocation %d of %d was unsuccessful; no more sets will "
                        "be allocated from this pool.", i, VulkanDescriptorPool::kMaxNumSets);
            break;
        }
    }

    return firstDescSet;
}

namespace {

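// Builds the cache key for a uniform-buffer bind group. Each of the
// VulkanGraphicsPipeline::kNumUniformBuffers slots contributes two words: the bound buffer's
// unique ID and its binding size. Slots without a bound buffer remain zeroed.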
VulkanResourceProvider::UniformBindGroupKey make_ubo_bind_group_key(
        SkSpan<DescriptorData> requestedDescriptors,
        SkSpan<BindBufferInfo> bindUniformBufferInfo) {
    VulkanResourceProvider::UniformBindGroupKey uniqueKey;
    {
        // Each entry in the bind group needs 2 uint32_t in the key:
        //  - buffer's unique ID: 32 bits.
        //  - buffer's binding size: 32 bits.
        // We need a total of 4 entries in the uniform buffer bind group.
        // Unused entries will be assigned zero values.
        VulkanResourceProvider::UniformBindGroupKey::Builder builder(&uniqueKey);

        for (uint32_t i = 0; i < VulkanGraphicsPipeline::kNumUniformBuffers; ++i) {
            builder[2 * i] = 0;
            builder[2 * i + 1] = 0;
        }

        for (uint32_t i = 0; i < requestedDescriptors.size(); ++i) {
            int descriptorBindingIndex = requestedDescriptors[i].fBindingIndex;
            SkASSERT(SkTo<unsigned long>(descriptorBindingIndex) < bindUniformBufferInfo.size());
            SkASSERT(SkTo<unsigned long>(descriptorBindingIndex) <
                     VulkanGraphicsPipeline::kNumUniformBuffers);
            const auto& bindInfo = bindUniformBufferInfo[descriptorBindingIndex];
            const VulkanBuffer* boundBuffer = static_cast<const VulkanBuffer*>(bindInfo.fBuffer);
            SkASSERT(boundBuffer);
            builder[2 * descriptorBindingIndex] = boundBuffer->uniqueID().asUInt();
            builder[2 * descriptorBindingIndex + 1] = bindInfo.fSize;
        }

        builder.finish();
    }

    return uniqueKey;
}

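// Writes each bound uniform buffer into the given descriptor set. The base offset is always 0
// because dynamic offsets are supplied when the set is bound.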
void update_uniform_descriptor_set(SkSpan<DescriptorData> requestedDescriptors,
                                   SkSpan<BindBufferInfo> bindUniformBufferInfo,
                                   VkDescriptorSet descSet,
                                   const VulkanSharedContext* sharedContext) {
    for (size_t i = 0; i < requestedDescriptors.size(); i++) {
        int descriptorBindingIndex = requestedDescriptors[i].fBindingIndex;
        SkASSERT(SkTo<unsigned long>(descriptorBindingIndex) < bindUniformBufferInfo.size());
        const auto& bindInfo = bindUniformBufferInfo[descriptorBindingIndex];
        if (bindInfo.fBuffer) {
#if defined(SK_DEBUG)
            static uint64_t maxBufferRange =
                    sharedContext->caps()->storageBufferSupport()
                            ? sharedContext->vulkanCaps().maxStorageBufferRange()
                            : sharedContext->vulkanCaps().maxUniformBufferRange();
            SkASSERT(bindInfo.fSize <= maxBufferRange);
#endif
            VkDescriptorBufferInfo bufferInfo;
            memset(&bufferInfo, 0, sizeof(VkDescriptorBufferInfo));
            auto vulkanBuffer = static_cast<const VulkanBuffer*>(bindInfo.fBuffer);
            bufferInfo.buffer = vulkanBuffer->vkBuffer();
            bufferInfo.offset = 0; // We always use dynamic UBOs, so the base offset is 0.
            bufferInfo.range = bindInfo.fSize;

            VkWriteDescriptorSet writeInfo;
            memset(&writeInfo, 0, sizeof(VkWriteDescriptorSet));
            writeInfo.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
            writeInfo.pNext = nullptr;
            writeInfo.dstSet = descSet;
            writeInfo.dstBinding = descriptorBindingIndex;
            writeInfo.dstArrayElement = 0;
            writeInfo.descriptorCount = requestedDescriptors[i].fCount;
            writeInfo.descriptorType = DsTypeEnumToVkDs(requestedDescriptors[i].fType);
            writeInfo.pImageInfo = nullptr;
            writeInfo.pBufferInfo = &bufferInfo;
            writeInfo.pTexelBufferView = nullptr;

            // TODO(b/293925059): Migrate to updating all the uniform descriptors with one driver
            // call. Calling UpdateDescriptorSets once to encapsulate updates to all uniform
            // descriptors would be ideal, but that led to issues with draws where all the UBOs
            // within that set would unexpectedly be assigned the same offset. Updating them one at
            // a time within this loop works in the meantime but is suboptimal.
            VULKAN_CALL(sharedContext->interface(),
                        UpdateDescriptorSets(sharedContext->device(),
                                             /*descriptorWriteCount=*/1,
                                             &writeInfo,
                                             /*descriptorCopyCount=*/0,
                                             /*pDescriptorCopies=*/nullptr));
        }
    }
}

} // anonymous namespace

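// Returns a descriptor set whose uniform-buffer bindings match bindUniformBufferInfo. A set that
// was previously updated with the same bind group is reused from fUniformBufferDescSetCache;
// otherwise a set is acquired, updated, and memoized under the bind group's key.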
sk_sp<VulkanDescriptorSet> VulkanResourceProvider::findOrCreateUniformBuffersDescriptorSet(
        SkSpan<DescriptorData> requestedDescriptors,
        SkSpan<BindBufferInfo> bindUniformBufferInfo) {
    SkASSERT(requestedDescriptors.size() <= VulkanGraphicsPipeline::kNumUniformBuffers);

    auto key = make_ubo_bind_group_key(requestedDescriptors, bindUniformBufferInfo);
    auto* existingDescSet = fUniformBufferDescSetCache.find(key);
    if (existingDescSet) {
        return *existingDescSet;
    }
    sk_sp<VulkanDescriptorSet> newDS = this->findOrCreateDescriptorSet(requestedDescriptors);
    if (!newDS) {
        return nullptr;
    }

    update_uniform_descriptor_set(requestedDescriptors,
                                  bindUniformBufferInfo,
                                  *newDS->descriptorSet(),
                                  this->vulkanSharedContext());
    return *fUniformBufferDescSetCache.insert(key, newDS);
}
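// Looks up a render pass in the resource cache with a precomputed key. On a miss, a new
// (possibly compatible-only) render pass is created, keyed, and cached.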
sk_sp<VulkanRenderPass> VulkanResourceProvider::findOrCreateRenderPassWithKnownKey(
        const RenderPassDesc& renderPassDesc,
        bool compatibleOnly,
        const GraphiteResourceKey& rpKey) {
    if (Resource* resource =
            fResourceCache->findAndRefResource(rpKey, skgpu::Budgeted::kYes)) {
        return sk_sp<VulkanRenderPass>(static_cast<VulkanRenderPass*>(resource));
    }

    sk_sp<VulkanRenderPass> renderPass =
            VulkanRenderPass::MakeRenderPass(this->vulkanSharedContext(),
                                             renderPassDesc,
                                             compatibleOnly);
    if (!renderPass) {
        return nullptr;
    }

    renderPass->setKey(rpKey);
    fResourceCache->insertResource(renderPass.get());

    return renderPass;
}

sk_sp<VulkanRenderPass> VulkanResourceProvider::findOrCreateRenderPass(
        const RenderPassDesc& renderPassDesc, bool compatibleOnly) {
    GraphiteResourceKey rpKey = VulkanRenderPass::MakeRenderPassKey(renderPassDesc, compatibleOnly);

    return this->findOrCreateRenderPassWithKnownKey(renderPassDesc, compatibleOnly, rpKey);
}

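// Lazily creates the VkPipelineCache on first request. If creation fails, the handle is reset to
// VK_NULL_HANDLE, which is a valid argument anywhere a pipeline cache is optional.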
VkPipelineCache VulkanResourceProvider::pipelineCache() {
    if (fPipelineCache == VK_NULL_HANDLE) {
        VkPipelineCacheCreateInfo createInfo;
        memset(&createInfo, 0, sizeof(VkPipelineCacheCreateInfo));
        createInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO;
        createInfo.pNext = nullptr;
        createInfo.flags = 0;
        createInfo.initialDataSize = 0;
        createInfo.pInitialData = nullptr;
        VkResult result;
        VULKAN_CALL_RESULT(this->vulkanSharedContext(),
                           result,
                           CreatePipelineCache(this->vulkanSharedContext()->device(),
                                               &createInfo,
                                               nullptr,
                                               &fPipelineCache));
        if (VK_SUCCESS != result) {
            fPipelineCache = VK_NULL_HANDLE;
        }
    }
    return fPipelineCache;
}

sk_sp<VulkanFramebuffer> VulkanResourceProvider::createFramebuffer(
        const VulkanSharedContext* context,
        const skia_private::TArray<VkImageView>& attachmentViews,
        const VulkanRenderPass& renderPass,
        const int width,
        const int height) {
    // TODO: Consider caching these in the future. If we pursue that, it may make more sense to
    // use a compatible renderpass rather than a full one to make each framebuffer more versatile.
    VkFramebufferCreateInfo framebufferInfo;
    memset(&framebufferInfo, 0, sizeof(VkFramebufferCreateInfo));
    framebufferInfo.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO;
    framebufferInfo.pNext = nullptr;
    framebufferInfo.flags = 0;
    framebufferInfo.renderPass = renderPass.renderPass();
    framebufferInfo.attachmentCount = attachmentViews.size();
    framebufferInfo.pAttachments = attachmentViews.begin();
    framebufferInfo.width = width;
    framebufferInfo.height = height;
    framebufferInfo.layers = 1;
    return VulkanFramebuffer::Make(context, framebufferInfo);
}

void VulkanResourceProvider::onDeleteBackendTexture(const BackendTexture& texture) {
    SkASSERT(texture.isValid());
    SkASSERT(texture.backend() == BackendApi::kVulkan);

    VULKAN_CALL(this->vulkanSharedContext()->interface(),
                DestroyImage(this->vulkanSharedContext()->device(),
                             BackendTextures::GetVkImage(texture),
                             /*VkAllocationCallbacks=*/nullptr));

    VulkanAlloc alloc = BackendTextures::GetMemoryAlloc(texture);
    // Free the image memory used for the BackendTexture's VkImage.
    //
    // How we do this depends upon how the image was allocated (via the memory allocator or with a
    // direct call to the Vulkan driver). If the VulkanAlloc's fBackendMemory is != 0, then that
    // means the allocator was used. Otherwise, a direct driver call was used and we should free
    // the VkDeviceMemory (fMemory).
    if (alloc.fBackendMemory) {
        skgpu::VulkanMemory::FreeImageMemory(this->vulkanSharedContext()->memoryAllocator(), alloc);
    } else {
        SkASSERT(alloc.fMemory != VK_NULL_HANDLE);
        VULKAN_CALL(this->vulkanSharedContext()->interface(),
                    FreeMemory(this->vulkanSharedContext()->device(), alloc.fMemory, nullptr));
    }
}

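// Returns a ycbcr conversion matching ycbcrInfo, reusing a cached conversion when one exists.
// Otherwise a new conversion is created, keyed, and inserted into the resource cache.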
sk_sp<VulkanYcbcrConversion> VulkanResourceProvider::findOrCreateCompatibleYcbcrConversion(
        const VulkanYcbcrConversionInfo& ycbcrInfo) const {
    if (!ycbcrInfo.isValid()) {
        return nullptr;
    }
    GraphiteResourceKey ycbcrConversionKey =
            VulkanYcbcrConversion::MakeYcbcrConversionKey(this->vulkanSharedContext(), ycbcrInfo);

    if (Resource* resource = fResourceCache->findAndRefResource(ycbcrConversionKey,
                                                                skgpu::Budgeted::kYes)) {
        return sk_sp<VulkanYcbcrConversion>(static_cast<VulkanYcbcrConversion*>(resource));
    }

    auto ycbcrConversion = VulkanYcbcrConversion::Make(this->vulkanSharedContext(), ycbcrInfo);
    if (!ycbcrConversion) {
        return nullptr;
    }

    ycbcrConversion->setKey(ycbcrConversionKey);
    fResourceCache->insertResource(ycbcrConversion.get());

    return ycbcrConversion;
}

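// Finds or builds the pipeline used to load MSAA contents from a resolve texture. Pipelines are
// keyed by a compatible render pass key and memoized in fLoadMSAAPipelines; the shared shader
// modules and pipeline layout are initialized on first use.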
sk_sp<VulkanGraphicsPipeline> VulkanResourceProvider::findOrCreateLoadMSAAPipeline(
        const RenderPassDesc& renderPassDesc) {

    if (!renderPassDesc.fColorResolveAttachment.fTextureInfo.isValid() ||
        !renderPassDesc.fColorAttachment.fTextureInfo.isValid()) {
        SKGPU_LOG_E("Loading MSAA from resolve texture requires valid color & resolve attachment");
        return nullptr;
    }

    // Check to see if we already have a suitable pipeline that we can use.
    GraphiteResourceKey renderPassKey =
            VulkanRenderPass::MakeRenderPassKey(renderPassDesc, /*compatibleOnly=*/true);
    for (int i = 0; i < fLoadMSAAPipelines.size(); i++) {
        if (renderPassKey == fLoadMSAAPipelines.at(i).first) {
            return fLoadMSAAPipelines.at(i).second;
        }
    }

    // If any of the load MSAA pipeline creation structures are null then we need to initialize
    // those before proceeding. If the creation of one of them fails, all are assigned to null, so
    // we only need to check one of the structures.
    if (fMSAALoadVertShaderModule == VK_NULL_HANDLE) {
        SkASSERT(fMSAALoadFragShaderModule == VK_NULL_HANDLE &&
                 fMSAALoadPipelineLayout == VK_NULL_HANDLE);
        if (!VulkanGraphicsPipeline::InitializeMSAALoadPipelineStructs(
                    this->vulkanSharedContext(),
                    &fMSAALoadVertShaderModule,
                    &fMSAALoadFragShaderModule,
                    &fMSAALoadShaderStageInfo[0],
                    &fMSAALoadPipelineLayout)) {
            SKGPU_LOG_E("Failed to initialize MSAA load pipeline creation structure(s)");
            return nullptr;
        }
    }

    sk_sp<VulkanRenderPass> compatibleRenderPass =
            this->findOrCreateRenderPassWithKnownKey(renderPassDesc,
                                                     /*compatibleOnly=*/true,
                                                     renderPassKey);
    if (!compatibleRenderPass) {
        SKGPU_LOG_E("Failed to make compatible render pass for loading MSAA");
        return nullptr;
    }

    sk_sp<VulkanGraphicsPipeline> pipeline = VulkanGraphicsPipeline::MakeLoadMSAAPipeline(
            this->vulkanSharedContext(),
            fMSAALoadVertShaderModule,
            fMSAALoadFragShaderModule,
            &fMSAALoadShaderStageInfo[0],
            fMSAALoadPipelineLayout,
            compatibleRenderPass,
            this->pipelineCache(),
            renderPassDesc.fColorAttachment.fTextureInfo);

    if (!pipeline) {
        SKGPU_LOG_E("Failed to create MSAA load pipeline");
        return nullptr;
    }

    fLoadMSAAPipelines.push_back(std::make_pair(renderPassKey, pipeline));
    return pipeline;
}

#ifdef SK_BUILD_FOR_ANDROID

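// Imports an AHardwareBuffer as a BackendTexture. Buffers with a known VkFormat are imported
// directly when the format supports the needed features; otherwise (or when the format is
// VK_FORMAT_UNDEFINED) the buffer is imported as an external format, which limits usage to
// sampling and requires a ycbcr conversion.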
BackendTexture VulkanResourceProvider::onCreateBackendTexture(AHardwareBuffer* hardwareBuffer,
                                                              bool isRenderable,
                                                              bool isProtectedContent,
                                                              SkISize dimensions,
                                                              bool fromAndroidWindow) const {

    const VulkanSharedContext* vkContext = this->vulkanSharedContext();
    VkDevice device = vkContext->device();
    const VulkanCaps& vkCaps = vkContext->vulkanCaps();

    VkAndroidHardwareBufferFormatPropertiesANDROID hwbFormatProps;
    VkAndroidHardwareBufferPropertiesANDROID hwbProps;
    if (!skgpu::GetAHardwareBufferProperties(
                &hwbFormatProps, &hwbProps, vkContext->interface(), hardwareBuffer, device)) {
        return {};
    }

    bool importAsExternalFormat = hwbFormatProps.format == VK_FORMAT_UNDEFINED;

    // Start to assemble VulkanTextureInfo which is needed later on to create the VkImage but can
    // sooner help us query VulkanCaps for certain format feature support.
    // TODO: Allow client to pass in tiling mode. For external formats, this is required to be
    // optimal. For AHBs that have a known Vulkan format, we can query VulkanCaps to determine if
    // optimal is a valid decision given the format features.
    VkImageTiling tiling = VK_IMAGE_TILING_OPTIMAL;
    VkImageCreateFlags imgCreateflags = isProtectedContent ? VK_IMAGE_CREATE_PROTECTED_BIT : 0;
    VkImageUsageFlags usageFlags = VK_IMAGE_USAGE_SAMPLED_BIT;
    // When importing as an external format the image usage can only be VK_IMAGE_USAGE_SAMPLED_BIT.
    if (!importAsExternalFormat) {
        usageFlags |= VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
        if (isRenderable) {
            // Renderable attachments can be used as input attachments if we are loading from MSAA.
            usageFlags |= VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT;
        }
    }
    VulkanTextureInfo vkTexInfo { VK_SAMPLE_COUNT_1_BIT,
                                  Mipmapped::kNo,
                                  imgCreateflags,
                                  hwbFormatProps.format,
                                  tiling,
                                  usageFlags,
                                  VK_SHARING_MODE_EXCLUSIVE,
                                  VK_IMAGE_ASPECT_COLOR_BIT,
                                  VulkanYcbcrConversionInfo() };

    if (isRenderable && (importAsExternalFormat || !vkCaps.isRenderable(vkTexInfo))) {
        SKGPU_LOG_W("Renderable texture requested from an AHardwareBuffer which uses a VkFormat "
                    "that Skia cannot render to (VkFormat: %d).\n", hwbFormatProps.format);
        return {};
    }

    if (!importAsExternalFormat && (!vkCaps.isTransferSrc(vkTexInfo) ||
                                    !vkCaps.isTransferDst(vkTexInfo) ||
                                    !vkCaps.isTexturable(vkTexInfo))) {
        if (isRenderable) {
            SKGPU_LOG_W("VkFormat %d is either unfamiliar to Skia or doesn't support the necessary"
                        " format features. Because a renderable texture was requested, we cannot "
                        "fall back to importing with an external format.\n", hwbFormatProps.format);
            return {};
        }
        // If the VkFormat does not support the features we need, then import as an external
        // format.
        importAsExternalFormat = true;
        // If we use VkExternalFormatANDROID with an externalFormat != 0, then the format must be
        // VK_FORMAT_UNDEFINED.
        vkTexInfo.fFormat = VK_FORMAT_UNDEFINED;
        vkTexInfo.fImageUsageFlags = VK_IMAGE_USAGE_SAMPLED_BIT;
    }

    VulkanYcbcrConversionInfo ycbcrInfo;
    VkExternalFormatANDROID externalFormat;
    externalFormat.sType = VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID;
    externalFormat.pNext = nullptr;
    externalFormat.externalFormat = 0; // If this is zero it is as if we aren't using this struct.
    if (importAsExternalFormat) {
        GetYcbcrConversionInfoFromFormatProps(&ycbcrInfo, hwbFormatProps);
        if (!ycbcrInfo.isValid()) {
            SKGPU_LOG_W("Failed to create valid YCbCr conversion information from hardware buffer "
                        "format properties.\n");
            return {};
        }
        vkTexInfo.fYcbcrConversionInfo = ycbcrInfo;
        externalFormat.externalFormat = hwbFormatProps.externalFormat;
    }
    const VkExternalMemoryImageCreateInfo externalMemoryImageInfo{
            VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO,                // sType
            &externalFormat,                                                    // pNext
            VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID, // handleTypes
    };

    SkASSERT(!(vkTexInfo.fFlags & VK_IMAGE_CREATE_PROTECTED_BIT) ||
             fSharedContext->isProtected() == Protected::kYes);

    const VkImageCreateInfo imageCreateInfo = {
            VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,                              // sType
            &externalMemoryImageInfo,                                         // pNext
            vkTexInfo.fFlags,                                                 // VkImageCreateFlags
            VK_IMAGE_TYPE_2D,                                                 // VkImageType
            vkTexInfo.fFormat,                                                // VkFormat
            { (uint32_t)dimensions.fWidth, (uint32_t)dimensions.fHeight, 1 }, // VkExtent3D
            1,                                                                // mipLevels
            1,                                                                // arrayLayers
            VK_SAMPLE_COUNT_1_BIT,                                            // samples
            vkTexInfo.fImageTiling,                                           // VkImageTiling
            vkTexInfo.fImageUsageFlags,                                       // VkImageUsageFlags
            vkTexInfo.fSharingMode,                                           // VkSharingMode
            0,                                                                // queueFamilyIndexCount
            nullptr,                                                          // pQueueFamilyIndices
            VK_IMAGE_LAYOUT_UNDEFINED,                                        // initialLayout
    };

    VkResult result;
    VkImage image;
    result = VULKAN_CALL(vkContext->interface(),
                         CreateImage(device, &imageCreateInfo, nullptr, &image));
    if (result != VK_SUCCESS) {
        return {};
    }

    const VkPhysicalDeviceMemoryProperties2& phyDevMemProps =
            vkContext->vulkanCaps().physicalDeviceMemoryProperties2();
    VulkanAlloc alloc;
    if (!AllocateAndBindImageMemory(&alloc, image, phyDevMemProps, hwbProps, hardwareBuffer,
                                    vkContext->interface(), device)) {
        VULKAN_CALL(vkContext->interface(), DestroyImage(device, image, nullptr));
        return {};
    }

    return BackendTextures::MakeVulkan(dimensions,
                                       vkTexInfo,
                                       VK_IMAGE_LAYOUT_UNDEFINED,
                                       VK_QUEUE_FAMILY_FOREIGN_EXT,
                                       image,
                                       alloc);
}

#endif // SK_BUILD_FOR_ANDROID

} // namespace skgpu::graphite