• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /*
2  * Copyright 2023 Google LLC
3  *
4  * Use of this source code is governed by a BSD-style license that can be
5  * found in the LICENSE file.
6  */
7 
8 #include "src/gpu/vk/VulkanUtilsPriv.h"
9 
10 #include "include/core/SkStream.h"
11 #include "include/gpu/vk/VulkanBackendContext.h"
12 #include "include/private/base/SkDebug.h"
13 #include "include/private/base/SkTFitsIn.h"
14 #include "include/private/base/SkTo.h"
15 #include "src/gpu/vk/VulkanInterface.h"
16 
17 #include <algorithm>
18 #include <vector>
19 
20 namespace skgpu {
21 
22 /**
23  * Define a macro that both ganesh and graphite can use to make simple calls into Vulkan so we can
24  * share more code between them.
25 */
26 #define SHARED_GR_VULKAN_CALL(IFACE, X) (IFACE)->fFunctions.f##X
27 
28 /**
29  * Returns a populated VkSamplerYcbcrConversionCreateInfo object based on VulkanYcbcrConversionInfo
30 */
SetupSamplerYcbcrConversionInfo(VkSamplerYcbcrConversionCreateInfo * outInfo,const VulkanYcbcrConversionInfo & conversionInfo)31 void SetupSamplerYcbcrConversionInfo(VkSamplerYcbcrConversionCreateInfo* outInfo,
32                                      const VulkanYcbcrConversionInfo& conversionInfo) {
33 #ifdef SK_DEBUG
34     const VkFormatFeatureFlags& featureFlags = conversionInfo.fFormatFeatures;
35 
36     // Format feature flags are only representative of an external format's capabilities, so skip
37     // these checks in the case of using a known format.
38     if (conversionInfo.fFormat == VK_FORMAT_UNDEFINED) {
39         if (conversionInfo.fXChromaOffset == VK_CHROMA_LOCATION_MIDPOINT ||
40             conversionInfo.fYChromaOffset == VK_CHROMA_LOCATION_MIDPOINT) {
41             SkASSERT(featureFlags & VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT);
42         }
43         if (conversionInfo.fXChromaOffset == VK_CHROMA_LOCATION_COSITED_EVEN ||
44             conversionInfo.fYChromaOffset == VK_CHROMA_LOCATION_COSITED_EVEN) {
45             SkASSERT(featureFlags & VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT);
46         }
47         if (conversionInfo.fChromaFilter == VK_FILTER_LINEAR) {
48             SkASSERT(featureFlags & VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT);
49         }
50         if (conversionInfo.fForceExplicitReconstruction) {
51             SkASSERT(featureFlags &
52                     VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT);
53         }
54     }
55 #endif
56 
57     VkFilter chromaFilter = conversionInfo.fChromaFilter;
58     if (!(conversionInfo.fFormatFeatures & VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT)) {
59         if (!(conversionInfo.fFormatFeatures &
60               VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT)) {
61             // Because we don't have have separate reconstruction filter, the min, mag and
62             // chroma filter must all match. However, we also don't support linear sampling so
63             // the min/mag filter have to be nearest. Therefore, we force the chrome filter to
64             // be nearest regardless of support for the feature
65             // VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT.
66             chromaFilter = VK_FILTER_NEAREST;
67         }
68     }
69 
70     outInfo->sType = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO;
71     outInfo->pNext = nullptr;
72     outInfo->format = conversionInfo.fFormat;
73     outInfo->ycbcrModel = conversionInfo.fYcbcrModel;
74     outInfo->ycbcrRange = conversionInfo.fYcbcrRange;
75     outInfo->components = conversionInfo.fComponents;
76     outInfo->xChromaOffset = conversionInfo.fXChromaOffset;
77     outInfo->yChromaOffset = conversionInfo.fYChromaOffset;
78     outInfo->chromaFilter = chromaFilter;
79     outInfo->forceExplicitReconstruction = conversionInfo.fForceExplicitReconstruction;
80 }
81 
SerializeVkYCbCrInfo(SkWStream * stream,const VulkanYcbcrConversionInfo & info)82 bool SerializeVkYCbCrInfo(SkWStream* stream, const VulkanYcbcrConversionInfo& info) {
83     SkASSERT(SkTFitsIn<uint64_t>(info.fFormat));
84     // fExternalFormat is already a uint64_t
85     SkASSERT(SkTFitsIn<uint8_t>(info.fYcbcrModel));
86     SkASSERT(SkTFitsIn<uint8_t>(info.fYcbcrRange));
87     SkASSERT(SkTFitsIn<uint8_t>(info.fXChromaOffset));
88     SkASSERT(SkTFitsIn<uint8_t>(info.fYChromaOffset));
89     SkASSERT(SkTFitsIn<uint64_t>(info.fChromaFilter));
90     SkASSERT(SkTFitsIn<uint64_t>(info.fFormatFeatures));
91     SkASSERT(SkTFitsIn<uint8_t>(info.fComponents.r));
92     SkASSERT(SkTFitsIn<uint8_t>(info.fComponents.g));
93     SkASSERT(SkTFitsIn<uint8_t>(info.fComponents.b));
94     SkASSERT(SkTFitsIn<uint8_t>(info.fComponents.a));
95     // fForceExplicitReconstruction is a VkBool32
96 
97     // TODO(robertphillips): this isn't as densely packed as possible
98     if (!stream->write64(static_cast<uint64_t>(info.fFormat)))           { return false; }
99     if (!stream->write64(info.fExternalFormat))                          { return false; }
100     if (!stream->write8(static_cast<uint8_t>(info.fYcbcrModel)))         { return false; }
101     if (!info.isValid()) {
102         return true;
103     }
104 
105     if (!stream->write8(static_cast<uint8_t>(info.fYcbcrRange)))         { return false; }
106     if (!stream->write8(static_cast<uint8_t>(info.fXChromaOffset)))      { return false; }
107     if (!stream->write8(static_cast<uint8_t>(info.fYChromaOffset)))      { return false; }
108     if (!stream->write64(static_cast<uint64_t>(info.fChromaFilter)))     { return false; }
109     if (!stream->write64(static_cast<uint64_t>(info.fFormatFeatures)))   { return false; }
110     if (!stream->write8(static_cast<uint8_t>(info.fComponents.r)))       { return false; }
111     if (!stream->write8(static_cast<uint8_t>(info.fComponents.g)))       { return false; }
112     if (!stream->write8(static_cast<uint8_t>(info.fComponents.b)))       { return false; }
113     if (!stream->write8(static_cast<uint8_t>(info.fComponents.a)))       { return false; }
114     if (!stream->writeBool(SkToBool(info.fForceExplicitReconstruction))) { return false;}
115 
116     return true;
117 }
118 
DeserializeVkYCbCrInfo(SkStream * stream,VulkanYcbcrConversionInfo * out)119 bool DeserializeVkYCbCrInfo(SkStream* stream, VulkanYcbcrConversionInfo* out) {
120     uint64_t tmp64;
121     uint8_t tmp8;
122 
123     if (!stream->readU64(&tmp64)) { return false; }
124     out->fFormat = static_cast<VkFormat>(tmp64);
125 
126     if (!stream->readU64(&tmp64)) { return false; }
127     out->fExternalFormat = tmp64;
128 
129     if (!stream->readU8(&tmp8)) { return false; }
130     out->fYcbcrModel = static_cast<VkSamplerYcbcrModelConversion>(tmp8);
131 
132     if (!out->isValid()) {
133         return true;
134     }
135 
136     if (!stream->readU8(&tmp8)) { return false; }
137     out->fYcbcrRange = static_cast<VkSamplerYcbcrRange>(tmp8);
138 
139     if (!stream->readU8(&tmp8)) { return false; }
140     out->fXChromaOffset = static_cast<VkChromaLocation>(tmp8);
141 
142     if (!stream->readU8(&tmp8)) { return false; }
143     out->fYChromaOffset = static_cast<VkChromaLocation>(tmp8);
144 
145     if (!stream->readU64(&tmp64)) { return false; }
146     out->fChromaFilter = static_cast<VkFilter>(tmp64);
147 
148     if (!stream->readU64(&tmp64)) { return false; }
149     out->fFormatFeatures = static_cast<VkFormatFeatureFlags>(tmp64);
150 
151     if (!stream->readU8(&tmp8)) { return false; }
152     out->fComponents.r = static_cast<VkComponentSwizzle>(tmp8);
153 
154     if (!stream->readU8(&tmp8)) { return false; }
155     out->fComponents.g = static_cast<VkComponentSwizzle>(tmp8);
156 
157     if (!stream->readU8(&tmp8)) { return false; }
158     out->fComponents.b = static_cast<VkComponentSwizzle>(tmp8);
159 
160     if (!stream->readU8(&tmp8)) { return false; }
161     out->fComponents.a = static_cast<VkComponentSwizzle>(tmp8);
162 
163     bool tmpBool;
164     if (!stream->readBool(&tmpBool)) { return false; }
165     out->fForceExplicitReconstruction = tmpBool;
166 
167     return false;
168 }
169 
170 #ifdef SK_BUILD_FOR_ANDROID
171 
172 /**
173  * Shared Vulkan AHardwareBuffer utility functions between graphite and ganesh
174 */
GetYcbcrConversionInfoFromFormatProps(VulkanYcbcrConversionInfo * outConversionInfo,const VkAndroidHardwareBufferFormatPropertiesANDROID & formatProps)175 void GetYcbcrConversionInfoFromFormatProps(
176         VulkanYcbcrConversionInfo* outConversionInfo,
177         const VkAndroidHardwareBufferFormatPropertiesANDROID& formatProps) {
178     outConversionInfo->fYcbcrModel = formatProps.suggestedYcbcrModel;
179     outConversionInfo->fYcbcrRange = formatProps.suggestedYcbcrRange;
180     outConversionInfo->fComponents = formatProps.samplerYcbcrConversionComponents;
181     outConversionInfo->fXChromaOffset = formatProps.suggestedXChromaOffset;
182     outConversionInfo->fYChromaOffset = formatProps.suggestedYChromaOffset;
183     outConversionInfo->fForceExplicitReconstruction = VK_FALSE;
184     outConversionInfo->fExternalFormat = formatProps.externalFormat;
185     outConversionInfo->fFormatFeatures = formatProps.formatFeatures;
186     if (VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT &
187         formatProps.formatFeatures) {
188         outConversionInfo->fChromaFilter = VK_FILTER_LINEAR;
189     } else {
190         outConversionInfo->fChromaFilter = VK_FILTER_NEAREST;
191     }
192 }
193 
/**
 * Queries the Vulkan driver for both the format properties and the general properties of the
 * given AHardwareBuffer in a single call, filling in both out-params.
 * Returns true on success; on failure logs the error (and, when available, the buffer's
 * description) and returns false.
 */
bool GetAHardwareBufferProperties(
        VkAndroidHardwareBufferFormatPropertiesANDROID* outHwbFormatProps,
        VkAndroidHardwareBufferPropertiesANDROID* outHwbProps,
        const skgpu::VulkanInterface* interface,
        const AHardwareBuffer* hwBuffer,
        VkDevice device) {
    outHwbFormatProps->sType =
            VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID;
    outHwbFormatProps->pNext = nullptr;

    // Chain the format properties onto the general properties so one query fills both structs.
    outHwbProps->sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID;
    outHwbProps->pNext = outHwbFormatProps;

    VkResult result =
            SHARED_GR_VULKAN_CALL(interface,
                                  GetAndroidHardwareBufferProperties(device,
                                                                     hwBuffer,
                                                                     outHwbProps));
    if (result != VK_SUCCESS) {
        // The spec suggests VK_ERROR_OUT_OF_HOST_MEMORY and VK_ERROR_INVALID_EXTERNAL_HANDLE_KHR
        // are the only failure codes, but some platforms may report others, such as
        // VK_ERROR_FORMAT_NOT_SUPPORTED (-11).
        SkDebugf("Failed to get AndroidHardwareBufferProperties (result:%d)", result);
#if __ANDROID_API__ >= 26
        // Log the buffer's own description to help diagnose why the query was rejected.
        AHardwareBuffer_Desc hwbDesc;
        AHardwareBuffer_describe(hwBuffer, &hwbDesc);
        SkDebugf("^ %" PRIu32 "x%" PRIu32 " AHB -- format:%" PRIu32 ", usage:%" PRIu64
                 ", layers:%" PRIu32,
                 hwbDesc.width,
                 hwbDesc.height,
                 hwbDesc.format,
                 hwbDesc.usage,
                 hwbDesc.layers);
#endif
        return false;
    }
    return true;
}
232 
/**
 * Imports the AHardwareBuffer's memory into Vulkan as a dedicated allocation and binds it to
 * the given image, recording the result in *outVulkanAlloc.
 * Returns false if no compatible memory type exists or if allocation/binding fails; on a
 * failed bind the freshly-allocated memory is freed before returning.
 */
bool AllocateAndBindImageMemory(skgpu::VulkanAlloc* outVulkanAlloc,
                                VkImage image,
                                const VkPhysicalDeviceMemoryProperties2& phyDevMemProps,
                                const VkAndroidHardwareBufferPropertiesANDROID& hwbProps,
                                AHardwareBuffer* hardwareBuffer,
                                const skgpu::VulkanInterface* interface,
                                VkDevice device) {
    VkResult result;
    uint32_t typeIndex = 0;
    bool foundHeap = false;
    // First preference: a memory type the buffer supports that is also device-local.
    uint32_t memTypeCnt = phyDevMemProps.memoryProperties.memoryTypeCount;
    for (uint32_t i = 0; i < memTypeCnt && !foundHeap; ++i) {
        if (hwbProps.memoryTypeBits & (1 << i)) {
            const VkPhysicalDeviceMemoryProperties& pdmp = phyDevMemProps.memoryProperties;
            uint32_t supportedFlags = pdmp.memoryTypes[i].propertyFlags &
                    VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
            if (supportedFlags == VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) {
                typeIndex = i;
                foundHeap = true;
            }
        }
    }

    /**
     * Fallback to use any available memory type for AHB.
     *
     * For external memory import, compatible memory types are decided by the Vulkan driver since
     * the memory has been allocated externally. There are usually special requirements against
     * external memory. e.g. AHB allocated with CPU R/W often usage bits is only importable for
     * non-device-local heap on some AMD systems.
    */
    if (!foundHeap && hwbProps.memoryTypeBits) {
        // ffs() returns the 1-based index of the lowest set bit; -1 converts to a type index.
        typeIndex = ffs(hwbProps.memoryTypeBits) - 1;
        foundHeap = true;
    }
    if (!foundHeap) {
        return false;
    }

    // Import the AHardwareBuffer's backing memory rather than allocating fresh memory.
    VkImportAndroidHardwareBufferInfoANDROID hwbImportInfo;
    hwbImportInfo.sType = VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID;
    hwbImportInfo.pNext = nullptr;
    hwbImportInfo.buffer = hardwareBuffer;

    // AHB imports must be dedicated allocations tied to the image they will back.
    VkMemoryDedicatedAllocateInfo dedicatedAllocInfo;
    dedicatedAllocInfo.sType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO;
    dedicatedAllocInfo.pNext = &hwbImportInfo;
    dedicatedAllocInfo.image = image;
    dedicatedAllocInfo.buffer = VK_NULL_HANDLE;

    VkMemoryAllocateInfo allocInfo = {
        VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,      // sType
        &dedicatedAllocInfo,                         // pNext
        hwbProps.allocationSize,                     // allocationSize
        typeIndex,                                   // memoryTypeIndex
    };

    VkDeviceMemory memory;
    result = SHARED_GR_VULKAN_CALL(interface,
                                   AllocateMemory(device, &allocInfo, nullptr, &memory));
    if (result != VK_SUCCESS) {
        return false;
    }

    VkBindImageMemoryInfo bindImageInfo;
    bindImageInfo.sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO;
    bindImageInfo.pNext = nullptr;
    bindImageInfo.image = image;
    bindImageInfo.memory = memory;
    bindImageInfo.memoryOffset = 0;

    result = SHARED_GR_VULKAN_CALL(interface, BindImageMemory2(device, 1, &bindImageInfo));
    if (result != VK_SUCCESS) {
        // Don't leak the allocation if we can't bind it to the image.
        SHARED_GR_VULKAN_CALL(interface, FreeMemory(device, memory, nullptr));
        return false;
    }

    outVulkanAlloc->fMemory = memory;
    outVulkanAlloc->fOffset = 0;
    outVulkanAlloc->fSize = hwbProps.allocationSize;
    outVulkanAlloc->fFlags = 0;
    outVulkanAlloc->fBackendMemory = 0;
    return true;
}
317 
318 #endif // SK_BUILD_FOR_ANDROID
319 
// Note: since this is called from Vulkan result-checking functions, any Vk calls this function
// makes must NOT be checked with those same functions to avoid infinite recursion.
/**
 * Invokes the client's device-lost callback, attaching whatever fault detail
 * VK_EXT_device_fault can provide. Always calls deviceLostProc exactly once (with empty
 * fault data and an explanatory message when the extension is unavailable or a query fails),
 * unless no callback was supplied at all.
 */
void InvokeDeviceLostCallback(const skgpu::VulkanInterface* vulkanInterface,
                              VkDevice vkDevice,
                              skgpu::VulkanDeviceLostContext deviceLostContext,
                              skgpu::VulkanDeviceLostProc deviceLostProc,
                              bool supportsDeviceFaultInfoExtension) {
    if (!deviceLostProc) {
        return;
    }

    std::vector<VkDeviceFaultAddressInfoEXT> addressInfos = {};
    std::vector<VkDeviceFaultVendorInfoEXT> vendorInfos = {};
    std::vector<std::byte> vendorBinaryData = {};

    if (!supportsDeviceFaultInfoExtension) {
        deviceLostProc(deviceLostContext,
                       "No details: VK_EXT_device_fault not available/enabled.",
                       addressInfos,
                       vendorInfos,
                       vendorBinaryData);
        return;
    }

    // Query counts (first of the extension's two-call pattern: sizes, then data).
    VkDeviceFaultCountsEXT faultCounts = {};
    faultCounts.sType = VK_STRUCTURE_TYPE_DEVICE_FAULT_COUNTS_EXT;
    VkResult result = SHARED_GR_VULKAN_CALL(vulkanInterface,
                                            GetDeviceFaultInfo(vkDevice, &faultCounts, NULL));
    if (result != VK_SUCCESS) {
        deviceLostProc(
                deviceLostContext,
                "No details: VK_EXT_device_fault error counting failed: " + std::to_string(result),
                addressInfos,
                vendorInfos,
                vendorBinaryData);
        return;
    }

    // Prepare storage
    addressInfos.resize(faultCounts.addressInfoCount);
    vendorInfos.resize(faultCounts.vendorInfoCount);
    vendorBinaryData.resize(faultCounts.vendorBinarySize);

    // Query fault info (second call fills the arrays sized above).
    VkDeviceFaultInfoEXT faultInfo = {};
    faultInfo.sType             = VK_STRUCTURE_TYPE_DEVICE_FAULT_INFO_EXT;
    faultInfo.pAddressInfos     = addressInfos.data();
    faultInfo.pVendorInfos      = vendorInfos.data();
    // data() on an empty vector may be non-null; pass nullptr explicitly when there is no
    // vendor binary payload.
    faultInfo.pVendorBinaryData =
            faultCounts.vendorBinarySize > 0 ? vendorBinaryData.data() : nullptr;
    result = SHARED_GR_VULKAN_CALL(vulkanInterface,
                                   GetDeviceFaultInfo(vkDevice, &faultCounts, &faultInfo));
    if (result != VK_SUCCESS) {
        deviceLostProc(
                deviceLostContext,
                "No details: VK_EXT_device_fault info dumping failed: " + std::to_string(result),
                addressInfos,
                vendorInfos,
                vendorBinaryData);
        return;
    }

    deviceLostProc(deviceLostContext,
                   std::string(faultInfo.description),
                   addressInfos,
                   vendorInfos,
                   vendorBinaryData);
}
389 
/**
 * Builds and validates a VulkanInterface from the client-supplied backend context.
 * The instance and physical-device versions are each clamped to the client's fMaxAPIVersion
 * (when set) before being reported via the optional out-params.
 * Returns nullptr if required proc addresses can't be resolved, version enumeration fails,
 * or the interface fails validation.
 */
sk_sp<skgpu::VulkanInterface> MakeInterface(const skgpu::VulkanBackendContext& context,
                                            const skgpu::VulkanExtensions* extOverride,
                                            uint32_t* instanceVersionOut,
                                            uint32_t* physDevVersionOut) {
    if (!extOverride) {
        extOverride = context.fVkExtensions;
    }
    SkASSERT(extOverride);
    PFN_vkEnumerateInstanceVersion localEnumerateInstanceVersion =
            reinterpret_cast<PFN_vkEnumerateInstanceVersion>(
                    context.fGetProc("vkEnumerateInstanceVersion", VK_NULL_HANDLE, VK_NULL_HANDLE));
    uint32_t instanceVersion = 0;
    if (!localEnumerateInstanceVersion) {
        // vkEnumerateInstanceVersion was added in Vulkan 1.1; its absence implies a 1.0 loader.
        instanceVersion = VK_MAKE_VERSION(1, 0, 0);
    } else {
        VkResult err = localEnumerateInstanceVersion(&instanceVersion);
        if (err) {
            return nullptr;
        }
    }

    PFN_vkGetPhysicalDeviceProperties localGetPhysicalDeviceProperties =
            reinterpret_cast<PFN_vkGetPhysicalDeviceProperties>(context.fGetProc(
                    "vkGetPhysicalDeviceProperties", context.fInstance, VK_NULL_HANDLE));

    if (!localGetPhysicalDeviceProperties) {
        return nullptr;
    }
    VkPhysicalDeviceProperties physDeviceProperties;
    localGetPhysicalDeviceProperties(context.fPhysicalDevice, &physDeviceProperties);
    uint32_t physDevVersion = physDeviceProperties.apiVersion;

    // fMaxAPIVersion lets the client cap the API version below what the driver reports.
    uint32_t apiVersion = context.fMaxAPIVersion ? context.fMaxAPIVersion : instanceVersion;

    instanceVersion = std::min(instanceVersion, apiVersion);
    physDevVersion = std::min(physDevVersion, apiVersion);

    sk_sp<skgpu::VulkanInterface> interface(new skgpu::VulkanInterface(context.fGetProc,
                                                                       context.fInstance,
                                                                       context.fDevice,
                                                                       instanceVersion,
                                                                       physDevVersion,
                                                                       extOverride));
    // Ensure every function pointer the interface needs for these versions/extensions resolved.
    if (!interface->validate(instanceVersion, physDevVersion, extOverride)) {
        return nullptr;
    }
    if (physDevVersionOut) {
        *physDevVersionOut = physDevVersion;
    }
    if (instanceVersionOut) {
        *instanceVersionOut = instanceVersion;
    }
    return interface;
}
444 
445 } // namespace skgpu
446