/*
 * Copyright 2022 Google LLC
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "src/gpu/graphite/vk/VulkanResourceProvider.h"

#include "include/core/SkSpan.h"
#include "include/gpu/MutableTextureState.h"
#include "include/gpu/graphite/BackendTexture.h"
#include "include/gpu/graphite/vk/VulkanGraphiteTypes.h"
#include "include/gpu/vk/VulkanMutableTextureState.h"
#include "src/gpu/graphite/Buffer.h"
#include "src/gpu/graphite/ComputePipeline.h"
#include "src/gpu/graphite/GraphicsPipeline.h"
#include "src/gpu/graphite/RenderPassDesc.h"
#include "src/gpu/graphite/Sampler.h"
#include "src/gpu/graphite/Texture.h"
#include "src/gpu/graphite/vk/VulkanBuffer.h"
#include "src/gpu/graphite/vk/VulkanCommandBuffer.h"
#include "src/gpu/graphite/vk/VulkanDescriptorPool.h"
#include "src/gpu/graphite/vk/VulkanDescriptorSet.h"
#include "src/gpu/graphite/vk/VulkanFramebuffer.h"
#include "src/gpu/graphite/vk/VulkanGraphicsPipeline.h"
#include "src/gpu/graphite/vk/VulkanRenderPass.h"
#include "src/gpu/graphite/vk/VulkanSampler.h"
#include "src/gpu/graphite/vk/VulkanSharedContext.h"
#include "src/gpu/graphite/vk/VulkanTexture.h"
#include "src/gpu/graphite/vk/VulkanYcbcrConversion.h"
#include "src/gpu/vk/VulkanMemory.h"
#include "src/sksl/SkSLCompiler.h"

#ifdef SK_BUILD_FOR_ANDROID
#include "src/gpu/vk/VulkanUtilsPriv.h"
#include <android/hardware_buffer.h>
#endif

namespace skgpu::graphite {

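// Upper bound on the number of uniform-buffer descriptor sets retained in this resource
// provider's LRU cache (fUniformBufferDescSetCache).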
constexpr int kMaxNumberOfCachedBufferDescSets = 1024;

VulkanResourceProvider::VulkanResourceProvider(SharedContext* sharedContext,
                                               SingleOwner* singleOwner,
                                               uint32_t recorderID,
                                               size_t resourceBudget,
                                               sk_sp<Buffer> intrinsicConstantUniformBuffer,
                                               sk_sp<Buffer> loadMSAAVertexBuffer)
        : ResourceProvider(sharedContext, singleOwner, recorderID, resourceBudget)
        , fIntrinsicUniformBuffer(std::move(intrinsicConstantUniformBuffer))
        , fLoadMSAAVertexBuffer(std::move(loadMSAAVertexBuffer))
        , fUniformBufferDescSetCache(kMaxNumberOfCachedBufferDescSets) {}

VulkanResourceProvider::~VulkanResourceProvider() {
    if (fPipelineCache != VK_NULL_HANDLE) {
        VULKAN_CALL(this->vulkanSharedContext()->interface(),
                    DestroyPipelineCache(this->vulkanSharedContext()->device(),
                                         fPipelineCache,
                                         nullptr));
    }
    if (fMSAALoadVertShaderModule != VK_NULL_HANDLE) {
        VULKAN_CALL(this->vulkanSharedContext()->interface(),
                    DestroyShaderModule(this->vulkanSharedContext()->device(),
                                        fMSAALoadVertShaderModule,
                                        nullptr));
    }
    if (fMSAALoadFragShaderModule != VK_NULL_HANDLE) {
        VULKAN_CALL(this->vulkanSharedContext()->interface(),
                    DestroyShaderModule(this->vulkanSharedContext()->device(),
                                        fMSAALoadFragShaderModule,
                                        nullptr));
    }
    if (fMSAALoadPipelineLayout != VK_NULL_HANDLE) {
        VULKAN_CALL(this->vulkanSharedContext()->interface(),
                    DestroyPipelineLayout(this->vulkanSharedContext()->device(),
                                          fMSAALoadPipelineLayout,
                                          nullptr));
    }
}

const VulkanSharedContext* VulkanResourceProvider::vulkanSharedContext() const {
    return static_cast<const VulkanSharedContext*>(fSharedContext);
}

sk_sp<Texture> VulkanResourceProvider::onCreateWrappedTexture(const BackendTexture& texture) {
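    // A wrapped texture whose TextureInfo carries valid YCbCr conversion information must hold
    // a matching conversion object, so find or create a compatible one before building the
    // texture.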
    sk_sp<VulkanYcbcrConversion> ycbcrConversion;
    if (texture.info().vulkanTextureSpec().fYcbcrConversionInfo.isValid()) {
        ycbcrConversion = this->findOrCreateCompatibleYcbcrConversion(
                texture.info().vulkanTextureSpec().fYcbcrConversionInfo);
        if (!ycbcrConversion) {
            return nullptr;
        }
    }

    return VulkanTexture::MakeWrapped(this->vulkanSharedContext(),
                                      texture.dimensions(),
                                      texture.info(),
                                      texture.getMutableState(),
                                      texture.getVkImage(),
                                      /*alloc=*/{} /*Skia does not own wrapped texture memory*/,
                                      std::move(ycbcrConversion));
}

sk_sp<Buffer> VulkanResourceProvider::refIntrinsicConstantBuffer() const {
    return fIntrinsicUniformBuffer;
}

const Buffer* VulkanResourceProvider::loadMSAAVertexBuffer() const {
    return fLoadMSAAVertexBuffer.get();
}

sk_sp<GraphicsPipeline> VulkanResourceProvider::createGraphicsPipeline(
        const RuntimeEffectDictionary* runtimeDict,
        const GraphicsPipelineDesc& pipelineDesc,
        const RenderPassDesc& renderPassDesc) {
    auto compatibleRenderPass =
            this->findOrCreateRenderPass(renderPassDesc, /*compatibleOnly=*/true);
    return VulkanGraphicsPipeline::Make(this->vulkanSharedContext(),
                                        runtimeDict,
                                        pipelineDesc,
                                        renderPassDesc,
                                        compatibleRenderPass,
                                        this->pipelineCache());
}

sk_sp<ComputePipeline> VulkanResourceProvider::createComputePipeline(const ComputePipelineDesc&) {
    return nullptr;
}

sk_sp<Texture> VulkanResourceProvider::createTexture(SkISize size,
                                                     const TextureInfo& info,
                                                     skgpu::Budgeted budgeted) {
    sk_sp<VulkanYcbcrConversion> ycbcrConversion;
    if (info.vulkanTextureSpec().fYcbcrConversionInfo.isValid()) {
        ycbcrConversion = this->findOrCreateCompatibleYcbcrConversion(
                info.vulkanTextureSpec().fYcbcrConversionInfo);
        if (!ycbcrConversion) {
            return nullptr;
        }
    }

    return VulkanTexture::Make(this->vulkanSharedContext(),
                               size,
                               info,
                               budgeted,
                               std::move(ycbcrConversion));
}

sk_sp<Buffer> VulkanResourceProvider::createBuffer(size_t size,
                                                   BufferType type,
                                                   AccessPattern accessPattern) {
    return VulkanBuffer::Make(this->vulkanSharedContext(), size, type, accessPattern);
}

sk_sp<Sampler> VulkanResourceProvider::createSampler(const SamplerDesc& samplerDesc) {
    sk_sp<VulkanYcbcrConversion> ycbcrConversion = nullptr;

    // Non-zero conversion information means the sampler utilizes a ycbcr conversion.
    bool usesYcbcrConversion = (samplerDesc.desc() >> SamplerDesc::kImmutableSamplerInfoShift) != 0;
    if (usesYcbcrConversion) {
        GraphiteResourceKey ycbcrKey = VulkanYcbcrConversion::GetKeyFromSamplerDesc(samplerDesc);
        if (Resource* resource = fResourceCache->findAndRefResource(ycbcrKey,
                                                                    skgpu::Budgeted::kYes)) {
            ycbcrConversion =
                    sk_sp<VulkanYcbcrConversion>(static_cast<VulkanYcbcrConversion*>(resource));
        } else {
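            // No cached conversion was found, so create one from the information packed into the
            // SamplerDesc: the bits above kImmutableSamplerInfoShift describe the conversion, and
            // externalFormatMSBs()/format() combine into a 64-bit format value.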
            ycbcrConversion = VulkanYcbcrConversion::Make(
                    this->vulkanSharedContext(),
                    static_cast<uint32_t>(
                            samplerDesc.desc() >> SamplerDesc::kImmutableSamplerInfoShift),
                    (uint64_t)(samplerDesc.externalFormatMSBs()) << 32 | samplerDesc.format());
            SkASSERT(ycbcrConversion);

            ycbcrConversion->setKey(ycbcrKey);
            fResourceCache->insertResource(ycbcrConversion.get());
        }
    }

    return VulkanSampler::Make(this->vulkanSharedContext(),
                               samplerDesc.samplingOptions(),
                               samplerDesc.tileModeX(),
                               samplerDesc.tileModeY(),
                               std::move(ycbcrConversion));
}

BackendTexture VulkanResourceProvider::onCreateBackendTexture(SkISize dimensions,
                                                              const TextureInfo& info) {
    VulkanTextureInfo vkTexInfo;
    if (!info.getVulkanTextureInfo(&vkTexInfo)) {
        return {};
    }
    VulkanTexture::CreatedImageInfo createdTextureInfo;
    if (!VulkanTexture::MakeVkImage(this->vulkanSharedContext(), dimensions, info,
                                    &createdTextureInfo)) {
        return {};
    }
    return {dimensions,
            vkTexInfo,
            skgpu::MutableTextureStates::GetVkImageLayout(createdTextureInfo.fMutableState.get()),
            skgpu::MutableTextureStates::GetVkQueueFamilyIndex(
                    createdTextureInfo.fMutableState.get()),
            createdTextureInfo.fImage,
            createdTextureInfo.fMemoryAlloc};
}

namespace {
GraphiteResourceKey build_desc_set_key(const SkSpan<DescriptorData>& requestedDescriptors) {
    static const ResourceType kType = GraphiteResourceKey::GenerateResourceType();

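    // Key layout: one uint32 holding the number of descriptors, followed by one packed uint32
    // (type | binding index | count) per requested descriptor.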
    const int num32DataCnt = requestedDescriptors.size() + 1;

    GraphiteResourceKey key;
    GraphiteResourceKey::Builder builder(&key, kType, num32DataCnt, Shareable::kNo);

    builder[0] = requestedDescriptors.size();
    for (int i = 1; i < num32DataCnt; i++) {
        const auto& currDesc = requestedDescriptors[i - 1];
        // TODO: Consider making the DescriptorData struct itself just use uint16_t.
        uint16_t smallerCount = static_cast<uint16_t>(currDesc.fCount);
        builder[i] = static_cast<uint8_t>(currDesc.fType) << 24 |
                     currDesc.fBindingIndex << 16 |
                     smallerCount;
    }
    builder.finish();
    return key;
}

sk_sp<VulkanDescriptorSet> add_new_desc_set_to_cache(const VulkanSharedContext* context,
                                                     const sk_sp<VulkanDescriptorPool>& pool,
                                                     const GraphiteResourceKey& descSetKey,
                                                     ResourceCache* resourceCache) {
    sk_sp<VulkanDescriptorSet> descSet = VulkanDescriptorSet::Make(context, pool);
    if (!descSet) {
        return nullptr;
    }
    descSet->setKey(descSetKey);
    resourceCache->insertResource(descSet.get());

    return descSet;
}
} // anonymous namespace

sk_sp<VulkanDescriptorSet> VulkanResourceProvider::findOrCreateDescriptorSet(
        SkSpan<DescriptorData> requestedDescriptors) {
    if (requestedDescriptors.empty()) {
        return nullptr;
    }
    // Search for available descriptor sets by assembling a key based upon the set's structure.
    GraphiteResourceKey key = build_desc_set_key(requestedDescriptors);
    if (auto descSet = fResourceCache->findAndRefResource(key, skgpu::Budgeted::kYes)) {
        // A non-null resource pointer indicates we have found an available descriptor set.
        return sk_sp<VulkanDescriptorSet>(static_cast<VulkanDescriptorSet*>(descSet));
    }

    // If we did not find an existing available desc set, allocate sets with the appropriate
    // layout and add them to the cache.
    VkDescriptorSetLayout layout;
    const VulkanSharedContext* context = this->vulkanSharedContext();
    DescriptorDataToVkDescSetLayout(context, requestedDescriptors, &layout);
    if (!layout) {
        return nullptr;
    }
    auto pool = VulkanDescriptorPool::Make(context, requestedDescriptors, layout);
    if (!pool) {
        VULKAN_CALL(context->interface(), DestroyDescriptorSetLayout(context->device(),
                                                                     layout,
                                                                     nullptr));
        return nullptr;
    }

    // Start with allocating one descriptor set. If one cannot be successfully created, then we can
    // return early before attempting to allocate more. Storing a ptr to the first set also
    // allows us to return that later without having to perform a find operation on the cache once
    // all the sets are added.
    auto firstDescSet =
            add_new_desc_set_to_cache(context, pool, key, fResourceCache.get());
    if (!firstDescSet) {
        return nullptr;
    }

    // Continue to allocate & cache the maximum number of sets so they can be easily accessed as
    // they're needed.
    for (int i = 1; i < VulkanDescriptorPool::kMaxNumSets; i++) {
        auto descSet =
                add_new_desc_set_to_cache(context, pool, key, fResourceCache.get());
        if (!descSet) {
            SKGPU_LOG_W("Descriptor set allocation %d of %d was unsuccessful; no more sets will "
                        "be allocated from this pool.", i, VulkanDescriptorPool::kMaxNumSets);
            break;
        }
    }

    return firstDescSet;
}

namespace {
UniqueKey make_ubo_bind_group_key(SkSpan<DescriptorData> requestedDescriptors,
                                  SkSpan<BindUniformBufferInfo> bindUniformBufferInfo) {
    static const UniqueKey::Domain kBufferBindGroupDomain = UniqueKey::GenerateDomain();

    UniqueKey uniqueKey;
    {
        // Each entry in the bind group needs 2 uint32_t in the key:
        // - buffer's unique ID: 32 bits.
        // - buffer's binding size: 32 bits.
        // We need a total of three entries in the uniform buffer bind group.
        // Unused entries will be assigned zero values.
        UniqueKey::Builder builder(
                &uniqueKey, kBufferBindGroupDomain, 6, "GraphicsPipelineBufferDescSet");

        for (uint32_t i = 0; i < VulkanGraphicsPipeline::kNumUniformBuffers; ++i) {
            builder[2 * i] = 0;
            builder[2 * i + 1] = 0;
        }

        for (uint32_t i = 0; i < requestedDescriptors.size(); ++i) {
            int descriptorBindingIndex = requestedDescriptors[i].fBindingIndex;
            SkASSERT(SkTo<unsigned long>(descriptorBindingIndex) < bindUniformBufferInfo.size());
            SkASSERT(SkTo<unsigned long>(descriptorBindingIndex) <
                     VulkanGraphicsPipeline::kNumUniformBuffers);
            const auto& bindInfo = bindUniformBufferInfo[descriptorBindingIndex];
            const VulkanBuffer* boundBuffer = static_cast<const VulkanBuffer*>(bindInfo.fBuffer);
            SkASSERT(boundBuffer);
            const uint32_t bindingSize = bindInfo.fBindingSize;
            builder[2 * descriptorBindingIndex] = boundBuffer->uniqueID().asUInt();
            builder[2 * descriptorBindingIndex + 1] = bindingSize;
        }

        builder.finish();
    }

    return uniqueKey;
}

void update_uniform_descriptor_set(SkSpan<DescriptorData> requestedDescriptors,
                                   SkSpan<BindUniformBufferInfo> bindUniformBufferInfo,
                                   VkDescriptorSet descSet,
                                   const VulkanSharedContext* sharedContext) {
    for (size_t i = 0; i < requestedDescriptors.size(); i++) {
        int descriptorBindingIndex = requestedDescriptors[i].fBindingIndex;
        SkASSERT(SkTo<unsigned long>(descriptorBindingIndex) < bindUniformBufferInfo.size());
        const auto& bindInfo = bindUniformBufferInfo[descriptorBindingIndex];
        if (bindInfo.fBuffer) {
#if defined(SK_DEBUG)
            static uint64_t maxUniformBufferRange =
                    sharedContext->vulkanCaps().maxUniformBufferRange();
            SkASSERT(bindInfo.fBindingSize <= maxUniformBufferRange);
#endif
            VkDescriptorBufferInfo bufferInfo;
            memset(&bufferInfo, 0, sizeof(VkDescriptorBufferInfo));
            auto vulkanBuffer = static_cast<const VulkanBuffer*>(bindInfo.fBuffer);
            bufferInfo.buffer = vulkanBuffer->vkBuffer();
            bufferInfo.offset = 0; // We always use dynamic ubos so we set the base offset to 0
            bufferInfo.range = bindInfo.fBindingSize;

            VkWriteDescriptorSet writeInfo;
            memset(&writeInfo, 0, sizeof(VkWriteDescriptorSet));
            writeInfo.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
            writeInfo.pNext = nullptr;
            writeInfo.dstSet = descSet;
            writeInfo.dstBinding = descriptorBindingIndex;
            writeInfo.dstArrayElement = 0;
            writeInfo.descriptorCount = requestedDescriptors[i].fCount;
            writeInfo.descriptorType = DsTypeEnumToVkDs(requestedDescriptors[i].fType);
            writeInfo.pImageInfo = nullptr;
            writeInfo.pBufferInfo = &bufferInfo;
            writeInfo.pTexelBufferView = nullptr;

            // TODO(b/293925059): Migrate to updating all the uniform descriptors with one driver
            // call. Calling UpdateDescriptorSets once to encapsulate updates to all uniform
            // descriptors would be ideal, but that led to issues with draws where all the UBOs
            // within that set would unexpectedly be assigned the same offset. Updating them one at
            // a time within this loop works in the meantime but is suboptimal.
            VULKAN_CALL(sharedContext->interface(),
                        UpdateDescriptorSets(sharedContext->device(),
                                             /*descriptorWriteCount=*/1,
                                             &writeInfo,
                                             /*descriptorCopyCount=*/0,
                                             /*pDescriptorCopies=*/nullptr));
        }
    }
}

} // anonymous namespace

sk_sp<VulkanDescriptorSet> VulkanResourceProvider::findOrCreateUniformBuffersDescriptorSet(
        SkSpan<DescriptorData> requestedDescriptors,
        SkSpan<BindUniformBufferInfo> bindUniformBufferInfo) {
    SkASSERT(requestedDescriptors.size() <= VulkanGraphicsPipeline::kNumUniformBuffers);

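    // First consult the LRU cache keyed on the specific buffers being bound. On a miss, fetch a
    // descriptor set with the right layout from the general resource cache, write the buffer
    // bindings into it, and cache it for subsequent draws.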
    auto key = make_ubo_bind_group_key(requestedDescriptors, bindUniformBufferInfo);
    auto* existingDescSet = fUniformBufferDescSetCache.find(key);
    if (existingDescSet) {
        return *existingDescSet;
    }
    sk_sp<VulkanDescriptorSet> newDS = this->findOrCreateDescriptorSet(requestedDescriptors);
    if (!newDS) {
        return nullptr;
    }

    update_uniform_descriptor_set(requestedDescriptors,
                                  bindUniformBufferInfo,
                                  *newDS->descriptorSet(),
                                  this->vulkanSharedContext());
    return *fUniformBufferDescSetCache.insert(key, newDS);
}

sk_sp<VulkanRenderPass> VulkanResourceProvider::findOrCreateRenderPassWithKnownKey(
        const RenderPassDesc& renderPassDesc,
        bool compatibleOnly,
        const GraphiteResourceKey& rpKey) {
    if (Resource* resource =
            fResourceCache->findAndRefResource(rpKey, skgpu::Budgeted::kYes)) {
        return sk_sp<VulkanRenderPass>(static_cast<VulkanRenderPass*>(resource));
    }

    sk_sp<VulkanRenderPass> renderPass =
            VulkanRenderPass::MakeRenderPass(this->vulkanSharedContext(),
                                             renderPassDesc,
                                             compatibleOnly);
    if (!renderPass) {
        return nullptr;
    }

    renderPass->setKey(rpKey);
    fResourceCache->insertResource(renderPass.get());

    return renderPass;
}

sk_sp<VulkanRenderPass> VulkanResourceProvider::findOrCreateRenderPass(
        const RenderPassDesc& renderPassDesc, bool compatibleOnly) {
    GraphiteResourceKey rpKey = VulkanRenderPass::MakeRenderPassKey(renderPassDesc, compatibleOnly);

    return this->findOrCreateRenderPassWithKnownKey(renderPassDesc, compatibleOnly, rpKey);
}

VkPipelineCache VulkanResourceProvider::pipelineCache() {
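    // The VkPipelineCache is created lazily on first request and is then shared by all pipelines
    // built through this resource provider.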
    if (fPipelineCache == VK_NULL_HANDLE) {
        VkPipelineCacheCreateInfo createInfo;
        memset(&createInfo, 0, sizeof(VkPipelineCacheCreateInfo));
        createInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO;
        createInfo.pNext = nullptr;
        createInfo.flags = 0;
        createInfo.initialDataSize = 0;
        createInfo.pInitialData = nullptr;
        VkResult result;
        VULKAN_CALL_RESULT(this->vulkanSharedContext(),
                           result,
                           CreatePipelineCache(this->vulkanSharedContext()->device(),
                                               &createInfo,
                                               nullptr,
                                               &fPipelineCache));
        if (VK_SUCCESS != result) {
            fPipelineCache = VK_NULL_HANDLE;
        }
    }
    return fPipelineCache;
}

sk_sp<VulkanFramebuffer> VulkanResourceProvider::createFramebuffer(
        const VulkanSharedContext* context,
        const skia_private::TArray<VkImageView>& attachmentViews,
        const VulkanRenderPass& renderPass,
        const int width,
        const int height) {
    // TODO: Consider caching these in the future. If we pursue that, it may make more sense to
    // use a compatible renderpass rather than a full one to make each framebuffer more versatile.
    VkFramebufferCreateInfo framebufferInfo;
    memset(&framebufferInfo, 0, sizeof(VkFramebufferCreateInfo));
    framebufferInfo.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO;
    framebufferInfo.pNext = nullptr;
    framebufferInfo.flags = 0;
    framebufferInfo.renderPass = renderPass.renderPass();
    framebufferInfo.attachmentCount = attachmentViews.size();
    framebufferInfo.pAttachments = attachmentViews.begin();
    framebufferInfo.width = width;
    framebufferInfo.height = height;
    framebufferInfo.layers = 1;
    return VulkanFramebuffer::Make(context, framebufferInfo);
}

void VulkanResourceProvider::onDeleteBackendTexture(const BackendTexture& texture) {
    SkASSERT(texture.isValid());
    SkASSERT(texture.backend() == BackendApi::kVulkan);

    VULKAN_CALL(this->vulkanSharedContext()->interface(),
                DestroyImage(this->vulkanSharedContext()->device(), texture.getVkImage(),
                             /*VkAllocationCallbacks=*/nullptr));

    // Free the image memory used for the BackendTexture's VkImage.
    //
    // How we do this depends upon how the image was allocated (via the memory allocator or with a
    // direct call to the Vulkan driver). If the VulkanAlloc's fBackendMemory is != 0, then that
    // means the allocator was used. Otherwise, a direct driver call was used and we should free
    // the VkDeviceMemory (fMemory).
    if (texture.getMemoryAlloc()->fBackendMemory) {
        skgpu::VulkanMemory::FreeImageMemory(this->vulkanSharedContext()->memoryAllocator(),
                                             *(texture.getMemoryAlloc()));
    } else {
        SkASSERT(texture.getMemoryAlloc()->fMemory != VK_NULL_HANDLE);
        VULKAN_CALL(this->vulkanSharedContext()->interface(),
                    FreeMemory(this->vulkanSharedContext()->device(),
                               texture.getMemoryAlloc()->fMemory,
                               nullptr));
    }
}

sk_sp<VulkanYcbcrConversion> VulkanResourceProvider::findOrCreateCompatibleYcbcrConversion(
        const VulkanYcbcrConversionInfo& ycbcrInfo) const {
    if (!ycbcrInfo.isValid()) {
        return nullptr;
    }
    GraphiteResourceKey ycbcrConversionKey =
            VulkanYcbcrConversion::MakeYcbcrConversionKey(this->vulkanSharedContext(), ycbcrInfo);

    if (Resource* resource = fResourceCache->findAndRefResource(ycbcrConversionKey,
                                                                skgpu::Budgeted::kYes)) {
        return sk_sp<VulkanYcbcrConversion>(static_cast<VulkanYcbcrConversion*>(resource));
    }

    auto ycbcrConversion = VulkanYcbcrConversion::Make(this->vulkanSharedContext(), ycbcrInfo);
    if (!ycbcrConversion) {
        return nullptr;
    }

    ycbcrConversion->setKey(ycbcrConversionKey);
    fResourceCache->insertResource(ycbcrConversion.get());

    return ycbcrConversion;
}

sk_sp<VulkanGraphicsPipeline> VulkanResourceProvider::findOrCreateLoadMSAAPipeline(
        const RenderPassDesc& renderPassDesc) {

    if (!renderPassDesc.fColorResolveAttachment.fTextureInfo.isValid() ||
        !renderPassDesc.fColorAttachment.fTextureInfo.isValid()) {
        SKGPU_LOG_E("Loading MSAA from resolve texture requires valid color & resolve attachment");
        return nullptr;
    }

    // Check to see if we already have a suitable pipeline that we can use.
    GraphiteResourceKey renderPassKey =
            VulkanRenderPass::MakeRenderPassKey(renderPassDesc, /*compatibleOnly=*/true);
    for (int i = 0; i < fLoadMSAAPipelines.size(); i++) {
        if (renderPassKey == fLoadMSAAPipelines.at(i).first) {
            return fLoadMSAAPipelines.at(i).second;
        }
    }

    // If any of the load MSAA pipeline creation structures are null then we need to initialize
    // those before proceeding. If the creation of one of them fails, all are assigned to null, so
    // we only need to check one of the structures.
    if (fMSAALoadVertShaderModule == VK_NULL_HANDLE) {
        SkASSERT(fMSAALoadFragShaderModule == VK_NULL_HANDLE &&
                 fMSAALoadPipelineLayout == VK_NULL_HANDLE);
        if (!VulkanGraphicsPipeline::InitializeMSAALoadPipelineStructs(
                    this->vulkanSharedContext(),
                    &fMSAALoadVertShaderModule,
                    &fMSAALoadFragShaderModule,
                    &fMSAALoadShaderStageInfo[0],
                    &fMSAALoadPipelineLayout)) {
            SKGPU_LOG_E("Failed to initialize MSAA load pipeline creation structure(s)");
            return nullptr;
        }
    }

    sk_sp<VulkanRenderPass> compatibleRenderPass =
            this->findOrCreateRenderPassWithKnownKey(renderPassDesc,
                                                     /*compatibleOnly=*/true,
                                                     renderPassKey);
    if (!compatibleRenderPass) {
        SKGPU_LOG_E("Failed to make compatible render pass for loading MSAA");
        return nullptr;
    }

    sk_sp<VulkanGraphicsPipeline> pipeline = VulkanGraphicsPipeline::MakeLoadMSAAPipeline(
            this->vulkanSharedContext(),
            fMSAALoadVertShaderModule,
            fMSAALoadFragShaderModule,
            &fMSAALoadShaderStageInfo[0],
            fMSAALoadPipelineLayout,
            compatibleRenderPass,
            this->pipelineCache(),
            renderPassDesc.fColorAttachment.fTextureInfo);

    if (!pipeline) {
        SKGPU_LOG_E("Failed to create MSAA load pipeline");
        return nullptr;
    }

    fLoadMSAAPipelines.push_back(std::make_pair(renderPassKey, pipeline));
    return pipeline;
}

#ifdef SK_BUILD_FOR_ANDROID

BackendTexture VulkanResourceProvider::onCreateBackendTexture(AHardwareBuffer* hardwareBuffer,
                                                              bool isRenderable,
                                                              bool isProtectedContent,
                                                              SkISize dimensions,
                                                              bool fromAndroidWindow) const {

    const VulkanSharedContext* vkContext = this->vulkanSharedContext();
    VkDevice device = vkContext->device();
    const VulkanCaps& vkCaps = vkContext->vulkanCaps();

    VkAndroidHardwareBufferFormatPropertiesANDROID hwbFormatProps;
    VkAndroidHardwareBufferPropertiesANDROID hwbProps;
    if (!skgpu::GetAHardwareBufferProperties(
                &hwbFormatProps, &hwbProps, vkContext->interface(), hardwareBuffer, device)) {
        return {};
    }

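    // A format of VK_FORMAT_UNDEFINED means the AHardwareBuffer's contents have no VkFormat
    // equivalent, so the image must be imported as an external format and sampled through a
    // YCbCr conversion.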
    bool importAsExternalFormat = hwbFormatProps.format == VK_FORMAT_UNDEFINED;

    // Start to assemble VulkanTextureInfo which is needed later on to create the VkImage but can
    // sooner help us query VulkanCaps for certain format feature support.
    // TODO: Allow client to pass in tiling mode. For external formats, this is required to be
    // optimal. For AHBs that have a known Vulkan format, we can query VulkanCaps to determine if
    // optimal is a valid decision given the format features.
    VkImageTiling tiling = VK_IMAGE_TILING_OPTIMAL;
    VkImageCreateFlags imgCreateflags = isProtectedContent ? VK_IMAGE_CREATE_PROTECTED_BIT : 0;
    VkImageUsageFlags usageFlags = VK_IMAGE_USAGE_SAMPLED_BIT;
    // When importing as an external format the image usage can only be VK_IMAGE_USAGE_SAMPLED_BIT.
    if (!importAsExternalFormat) {
        usageFlags |= VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
        if (isRenderable) {
            // Renderable attachments can be used as input attachments if we are loading from MSAA.
            usageFlags |= VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT;
        }
    }
    VulkanTextureInfo vkTexInfo { VK_SAMPLE_COUNT_1_BIT,
                                  Mipmapped::kNo,
                                  imgCreateflags,
                                  hwbFormatProps.format,
                                  tiling,
                                  usageFlags,
                                  VK_SHARING_MODE_EXCLUSIVE,
                                  VK_IMAGE_ASPECT_COLOR_BIT,
                                  VulkanYcbcrConversionInfo() };

    if (isRenderable && (importAsExternalFormat || !vkCaps.isRenderable(vkTexInfo))) {
        SKGPU_LOG_W("Renderable texture requested from an AHardwareBuffer which uses a VkFormat "
                    "that Skia cannot render to (VkFormat: %d).\n", hwbFormatProps.format);
        return {};
    }

    if (!importAsExternalFormat && (!vkCaps.isTransferSrc(vkTexInfo) ||
                                    !vkCaps.isTransferDst(vkTexInfo) ||
                                    !vkCaps.isTexturable(vkTexInfo))) {
        if (isRenderable) {
            SKGPU_LOG_W("VkFormat %d is either unfamiliar to Skia or doesn't support the necessary"
                        " format features. Because a renderable texture was requested, we cannot "
                        "fall back to importing with an external format.\n", hwbFormatProps.format);
            return {};
        }
        // If the VkFormat does not support the features we need, then import as an external
        // format.
        importAsExternalFormat = true;
        // If we use VkExternalFormatANDROID with an externalFormat != 0, then format must =
        // VK_FORMAT_UNDEFINED.
        vkTexInfo.fFormat = VK_FORMAT_UNDEFINED;
        vkTexInfo.fImageUsageFlags = VK_IMAGE_USAGE_SAMPLED_BIT;
    }

    VulkanYcbcrConversionInfo ycbcrInfo;
    VkExternalFormatANDROID externalFormat;
    externalFormat.sType = VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID;
    externalFormat.pNext = nullptr;
    externalFormat.externalFormat = 0; // If this is zero it is as if we aren't using this struct.
    if (importAsExternalFormat) {
        GetYcbcrConversionInfoFromFormatProps(&ycbcrInfo, hwbFormatProps);
        if (!ycbcrInfo.isValid()) {
            SKGPU_LOG_W("Failed to create valid YCbCr conversion information from hardware "
                        "buffer format properties.\n");
            return {};
        }
        externalFormat.externalFormat = hwbFormatProps.externalFormat;
    }
    const VkExternalMemoryImageCreateInfo externalMemoryImageInfo{
            VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO,                // sType
            &externalFormat,                                                    // pNext
            VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID, // handleTypes
    };

    SkASSERT(!(vkTexInfo.fFlags & VK_IMAGE_CREATE_PROTECTED_BIT) ||
             fSharedContext->isProtected() == Protected::kYes);

    const VkImageCreateInfo imageCreateInfo = {
            VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,                 // sType
            &externalMemoryImageInfo,                            // pNext
            vkTexInfo.fFlags,                                    // VkImageCreateFlags
            VK_IMAGE_TYPE_2D,                                    // VkImageType
            vkTexInfo.fFormat,                                   // VkFormat
            { (uint32_t)dimensions.fWidth, (uint32_t)dimensions.fHeight, 1 },  // VkExtent3D
            1,                                                   // mipLevels
            1,                                                   // arrayLayers
            VK_SAMPLE_COUNT_1_BIT,                               // samples
            vkTexInfo.fImageTiling,                              // VkImageTiling
            vkTexInfo.fImageUsageFlags,                          // VkImageUsageFlags
            vkTexInfo.fSharingMode,                              // VkSharingMode
            0,                                                   // queueFamilyCount
            nullptr,                                             // pQueueFamilyIndices
            VK_IMAGE_LAYOUT_UNDEFINED,                           // initialLayout
    };

    VkResult result;
    VkImage image;
    result = VULKAN_CALL(vkContext->interface(),
                         CreateImage(device, &imageCreateInfo, nullptr, &image));
    if (result != VK_SUCCESS) {
        return {};
    }

    const VkPhysicalDeviceMemoryProperties2& phyDevMemProps =
            vkContext->vulkanCaps().physicalDeviceMemoryProperties2();
    VulkanAlloc alloc;
    if (!AllocateAndBindImageMemory(&alloc, image, phyDevMemProps, hwbProps, hardwareBuffer,
                                    vkContext->interface(), device)) {
        VULKAN_CALL(vkContext->interface(), DestroyImage(device, image, nullptr));
        return {};
    }

    return { dimensions, vkTexInfo, VK_IMAGE_LAYOUT_UNDEFINED, VK_QUEUE_FAMILY_FOREIGN_EXT,
             image, alloc };
}

#endif // SK_BUILD_FOR_ANDROID

} // namespace skgpu::graphite