1 /*
2 * Copyright 2020 Google Inc.
3 *
4 * Use of this source code is governed by a BSD-style license that can be
5 * found in the LICENSE file.
6 */
7
8 #include "tools/gpu/vk/VkYcbcrSamplerHelper.h"
9
10 #ifdef SK_VULKAN
11
12 #include "include/gpu/GrDirectContext.h"
13 #include "include/gpu/ganesh/vk/GrVkBackendSurface.h"
14 #include "src/gpu/ganesh/GrDirectContextPriv.h"
15 #include "src/gpu/ganesh/vk/GrVkGpu.h"
16 #include "src/gpu/ganesh/vk/GrVkUtil.h"
17 #include "src/gpu/vk/VulkanInterface.h"
18
19 #if defined(SK_GRAPHITE)
20 #include "include/gpu/GpuTypes.h"
21 #include "include/gpu/graphite/BackendTexture.h"
22 #include "include/gpu/graphite/Recorder.h"
23 #include "include/gpu/graphite/vk/VulkanGraphiteTypes.h"
24 #include "src/gpu/graphite/vk/VulkanGraphiteUtilsPriv.h"
25 #include "src/gpu/graphite/vk/VulkanSharedContext.h"
26 #endif
27
GetExpectedY(int x,int y,int width,int height)28 int VkYcbcrSamplerHelper::GetExpectedY(int x, int y, int width, int height) {
29 return 16 + (x + y) * 219 / (width + height - 2);
30 }
31
GetExpectedUV(int x,int y,int width,int height)32 std::pair<int, int> VkYcbcrSamplerHelper::GetExpectedUV(int x, int y, int width, int height) {
33 return { 16 + x * 224 / (width - 1), 16 + y * 224 / (height - 1) };
34 }
35
36 namespace {
37
populate_ycbcr_image_info(VkImageCreateInfo * outImageInfo,uint32_t width,uint32_t height)38 void populate_ycbcr_image_info(VkImageCreateInfo* outImageInfo, uint32_t width, uint32_t height) {
39 outImageInfo->sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
40 outImageInfo->pNext = nullptr;
41 outImageInfo->flags = 0;
42 outImageInfo->imageType = VK_IMAGE_TYPE_2D;
43 outImageInfo->format = VK_FORMAT_G8_B8R8_2PLANE_420_UNORM;
44 outImageInfo->extent = VkExtent3D{width, height, 1};
45 outImageInfo->mipLevels = 1;
46 outImageInfo->arrayLayers = 1;
47 outImageInfo->samples = VK_SAMPLE_COUNT_1_BIT;
48 outImageInfo->tiling = VK_IMAGE_TILING_LINEAR;
49 outImageInfo->usage = VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT |
50 VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
51 outImageInfo->sharingMode = VK_SHARING_MODE_EXCLUSIVE;
52 outImageInfo->queueFamilyIndexCount = 0;
53 outImageInfo->pQueueFamilyIndices = nullptr;
54 outImageInfo->initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
55 }
56
find_memory_type_index(const VkPhysicalDeviceMemoryProperties & phyDevMemProps,const VkMemoryRequirements & memoryRequirements,uint32_t * memoryTypeIndex)57 bool find_memory_type_index(const VkPhysicalDeviceMemoryProperties& phyDevMemProps,
58 const VkMemoryRequirements& memoryRequirements,
59 uint32_t* memoryTypeIndex) {
60 for (uint32_t i = 0; i < phyDevMemProps.memoryTypeCount; ++i) {
61 if (memoryRequirements.memoryTypeBits & (1 << i)) {
62 // Map host-visible memory.
63 if (phyDevMemProps.memoryTypes[i].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) {
64 *memoryTypeIndex = i;
65 return true;
66 }
67 }
68 }
69 return false;
70 }
71
72 }
73
74 #ifdef SK_GRAPHITE
75 // TODO(b/339211930): When graphite and ganesh can share a macro for certain Vulkan driver calls,
76 // much more code can be shared between this method and createGrBackendTexture.
// Creates a Graphite BackendTexture backed by a freshly allocated, linearly
// tiled, host-visible VK_FORMAT_G8_B8R8_2PLANE_420_UNORM image whose Y and UV
// planes are filled with the GetExpectedY()/GetExpectedUV() test pattern.
// Returns false if any Vulkan call fails or no host-visible memory type is
// available; on success fImage, fImageMemory, and fTexture are populated.
// The destructor releases fImage/fImageMemory.
bool VkYcbcrSamplerHelper::createBackendTexture(uint32_t width, uint32_t height) {
    // Create YCbCr image.
    VkImageCreateInfo vkImageInfo;
    populate_ycbcr_image_info(&vkImageInfo, width, height);
    SkASSERT(fImage == VK_NULL_HANDLE);

    VkResult result;
    VULKAN_CALL_RESULT(fSharedCtxt, result, CreateImage(fSharedCtxt->device(),
                                                        &vkImageInfo,
                                                        /*pAllocator=*/nullptr,
                                                        &fImage));
    if (result != VK_SUCCESS) {
        return false;
    }

    // Allocate backing memory from the first host-visible memory type that
    // satisfies the image's requirements.
    VkMemoryRequirements requirements;
    VULKAN_CALL(fSharedCtxt->interface(), GetImageMemoryRequirements(fSharedCtxt->device(),
                                                                     fImage,
                                                                     &requirements));
    uint32_t memoryTypeIndex = 0;
    const VkPhysicalDeviceMemoryProperties& phyDevMemProps =
            fSharedCtxt->vulkanCaps().physicalDeviceMemoryProperties2().memoryProperties;
    if (!find_memory_type_index(phyDevMemProps, requirements, &memoryTypeIndex)) {
        return false;
    }

    VkMemoryAllocateInfo allocInfo;
    allocInfo.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
    allocInfo.pNext = nullptr;
    allocInfo.allocationSize = requirements.size;
    allocInfo.memoryTypeIndex = memoryTypeIndex;

    SkASSERT(fImageMemory == VK_NULL_HANDLE);
    VULKAN_CALL_RESULT(fSharedCtxt, result, AllocateMemory(fSharedCtxt->device(),
                                                           &allocInfo,
                                                           nullptr,
                                                           &fImageMemory));
    if (result != VK_SUCCESS) {
        return false;
    }

    // Map the entire allocation so the test pattern can be written by the CPU.
    void* mappedBuffer;
    VULKAN_CALL_RESULT(fSharedCtxt, result, MapMemory(fSharedCtxt->device(),
                                                      fImageMemory,
                                                      /*offset=*/0u,
                                                      requirements.size,
                                                      /*flags=*/0u,
                                                      &mappedBuffer));
    if (result != VK_SUCCESS) {
        return false;
    }

    // Write Y channel.
    VkImageSubresource subresource;
    subresource.aspectMask = VK_IMAGE_ASPECT_PLANE_0_BIT;
    subresource.mipLevel = 0;
    subresource.arrayLayer = 0;

    VkSubresourceLayout yLayout;
    VULKAN_CALL(fSharedCtxt->interface(),
                GetImageSubresourceLayout(fSharedCtxt->device(), fImage, &subresource, &yLayout));
    uint8_t* bufferData = reinterpret_cast<uint8_t*>(mappedBuffer) + yLayout.offset;
    for (size_t y = 0; y < height; ++y) {
        for (size_t x = 0; x < width; ++x) {
            bufferData[y * yLayout.rowPitch + x] = GetExpectedY(x, y, width, height);
        }
    }

    // Write UV channels. Plane 1 is half-resolution with interleaved U/V
    // bytes, so each (x, y) here covers a 2x2 block of full-resolution pixels.
    subresource.aspectMask = VK_IMAGE_ASPECT_PLANE_1_BIT;
    VkSubresourceLayout uvLayout;
    VULKAN_CALL(fSharedCtxt->interface(), GetImageSubresourceLayout(fSharedCtxt->device(),
                                                                    fImage,
                                                                    &subresource,
                                                                    &uvLayout));
    bufferData = reinterpret_cast<uint8_t*>(mappedBuffer) + uvLayout.offset;
    for (size_t y = 0; y < height / 2; ++y) {
        for (size_t x = 0; x < width / 2; ++x) {
            auto [u, v] = GetExpectedUV(2*x, 2*y, width, height);
            bufferData[y * uvLayout.rowPitch + x * 2] = u;
            bufferData[y * uvLayout.rowPitch + x * 2 + 1] = v;
        }
    }

    // Flush the whole mapped range so the CPU writes reach the device even if
    // the chosen memory type is not host-coherent.
    VkMappedMemoryRange flushRange;
    flushRange.sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
    flushRange.pNext = nullptr;
    flushRange.memory = fImageMemory;
    flushRange.offset = 0;
    flushRange.size = VK_WHOLE_SIZE;
    VULKAN_CALL_RESULT(fSharedCtxt, result, FlushMappedMemoryRanges(fSharedCtxt->device(),
                                                                    /*memoryRangeCount=*/1,
                                                                    &flushRange));
    if (result != VK_SUCCESS) {
        return false;
    }
    VULKAN_CALL(fSharedCtxt->interface(), UnmapMemory(fSharedCtxt->device(), fImageMemory));

    // Bind image memory.
    VULKAN_CALL_RESULT(fSharedCtxt, result, BindImageMemory(fSharedCtxt->device(),
                                                            fImage,
                                                            fImageMemory,
                                                            /*memoryOffset=*/0u));
    if (result != VK_SUCCESS) {
        return false;
    }

    // Wrap the image into SkImage. In debug builds, verify the device supports
    // linear-tiled sampling with YCbCr linear filtering and cosited chroma for
    // this format (isYCbCrSupported() checks the same flags at runtime).
    VkFormatProperties formatProperties;
    SkASSERT(fPhysDev != VK_NULL_HANDLE);
    VULKAN_CALL(fSharedCtxt->interface(),
                GetPhysicalDeviceFormatProperties(fPhysDev,
                                                  VK_FORMAT_G8_B8R8_2PLANE_420_UNORM,
                                                  &formatProperties));
    SkDEBUGCODE(auto linFlags = formatProperties.linearTilingFeatures;)
    SkASSERT((linFlags & VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT) &&
             (linFlags & VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT) &&
             (linFlags & VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT) &&
             (linFlags & VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT));

    // BT.709 narrow-range conversion with cosited chroma and linear filtering,
    // matching the feature bits asserted above.
    skgpu::VulkanYcbcrConversionInfo ycbcrInfo = {vkImageInfo.format,
                                                  /*externalFormat=*/0,
                                                  VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709,
                                                  VK_SAMPLER_YCBCR_RANGE_ITU_NARROW,
                                                  VK_CHROMA_LOCATION_COSITED_EVEN,
                                                  VK_CHROMA_LOCATION_COSITED_EVEN,
                                                  VK_FILTER_LINEAR,
                                                  false,
                                                  formatProperties.linearTilingFeatures};
    skgpu::VulkanAlloc alloc;
    alloc.fMemory = fImageMemory;
    alloc.fOffset = 0;
    alloc.fSize = requirements.size;

    skgpu::graphite::VulkanTextureInfo imageInfo = {
            static_cast<uint32_t>(vkImageInfo.samples),
            skgpu::Mipmapped::kNo,
            // NOTE(review): the image was created with flags == 0 (see
            // populate_ycbcr_image_info), yet PROTECTED is reported here —
            // confirm this is intentional vs. passing vkImageInfo.flags.
            VK_IMAGE_CREATE_PROTECTED_BIT,
            vkImageInfo.format,
            vkImageInfo.tiling,
            vkImageInfo.usage,
            vkImageInfo.sharingMode,
            VK_IMAGE_ASPECT_PLANE_0_BIT | VK_IMAGE_ASPECT_PLANE_1_BIT,
            ycbcrInfo};

    fTexture = skgpu::graphite::BackendTexture{{(int32_t)width, (int32_t)height},
                                               imageInfo,
                                               VK_IMAGE_LAYOUT_UNDEFINED,
                                               /*queueFamilyIndex=*/0,
                                               fImage,
                                               alloc};
    return true;
}
230 #endif // SK_GRAPHITE
231
// Ganesh counterpart of createBackendTexture(): allocates a linearly tiled,
// host-visible VK_FORMAT_G8_B8R8_2PLANE_420_UNORM image, fills its planes with
// the GetExpectedY()/GetExpectedUV() test pattern, and wraps it in fGrTexture.
// Returns false if any Vulkan call fails or no host-visible memory type is
// available. The destructor releases fImage/fImageMemory.
bool VkYcbcrSamplerHelper::createGrBackendTexture(uint32_t width, uint32_t height) {
    GrVkGpu* vkGpu = this->vkGpu();
    VkResult result;

    // Create YCbCr image.
    VkImageCreateInfo vkImageInfo;
    populate_ycbcr_image_info(&vkImageInfo, width, height);
    SkASSERT(fImage == VK_NULL_HANDLE);

    GR_VK_CALL_RESULT(vkGpu, result, CreateImage(vkGpu->device(), &vkImageInfo, nullptr, &fImage));
    if (result != VK_SUCCESS) {
        return false;
    }

    // Allocate backing memory from the first host-visible memory type that
    // satisfies the image's requirements.
    VkMemoryRequirements requirements;
    GR_VK_CALL(vkGpu->vkInterface(), GetImageMemoryRequirements(vkGpu->device(),
                                                                fImage,
                                                                &requirements));

    uint32_t memoryTypeIndex = 0;
    VkPhysicalDeviceMemoryProperties phyDevMemProps;
    GR_VK_CALL(vkGpu->vkInterface(), GetPhysicalDeviceMemoryProperties(vkGpu->physicalDevice(),
                                                                       &phyDevMemProps));
    if (!find_memory_type_index(phyDevMemProps, requirements, &memoryTypeIndex)) {
        return false;
    }

    VkMemoryAllocateInfo allocInfo = {};
    allocInfo.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
    allocInfo.allocationSize = requirements.size;
    allocInfo.memoryTypeIndex = memoryTypeIndex;

    SkASSERT(fImageMemory == VK_NULL_HANDLE);
    GR_VK_CALL_RESULT(vkGpu, result, AllocateMemory(vkGpu->device(), &allocInfo,
                                                    nullptr, &fImageMemory));
    if (result != VK_SUCCESS) {
        return false;
    }

    // Map the entire allocation so the test pattern can be written by the CPU.
    void* mappedBuffer;
    GR_VK_CALL_RESULT(vkGpu, result, MapMemory(vkGpu->device(), fImageMemory, 0u,
                                               requirements.size, 0u, &mappedBuffer));
    if (result != VK_SUCCESS) {
        return false;
    }

    // Write Y channel.
    VkImageSubresource subresource;
    subresource.aspectMask = VK_IMAGE_ASPECT_PLANE_0_BIT;
    subresource.mipLevel = 0;
    subresource.arrayLayer = 0;

    VkSubresourceLayout yLayout;
    GR_VK_CALL(vkGpu->vkInterface(), GetImageSubresourceLayout(vkGpu->device(), fImage,
                                                               &subresource, &yLayout));
    uint8_t* bufferData = reinterpret_cast<uint8_t*>(mappedBuffer) + yLayout.offset;
    for (size_t y = 0; y < height; ++y) {
        for (size_t x = 0; x < width; ++x) {
            bufferData[y * yLayout.rowPitch + x] = GetExpectedY(x, y, width, height);
        }
    }

    // Write UV channels. Plane 1 is half-resolution with interleaved U/V
    // bytes, so each (x, y) here covers a 2x2 block of full-resolution pixels.
    subresource.aspectMask = VK_IMAGE_ASPECT_PLANE_1_BIT;
    VkSubresourceLayout uvLayout;
    GR_VK_CALL(vkGpu->vkInterface(), GetImageSubresourceLayout(vkGpu->device(), fImage,
                                                               &subresource, &uvLayout));
    bufferData = reinterpret_cast<uint8_t*>(mappedBuffer) + uvLayout.offset;
    for (size_t y = 0; y < height / 2; ++y) {
        for (size_t x = 0; x < width / 2; ++x) {
            auto [u, v] = GetExpectedUV(2*x, 2*y, width, height);
            bufferData[y * uvLayout.rowPitch + x * 2] = u;
            bufferData[y * uvLayout.rowPitch + x * 2 + 1] = v;
        }
    }

    // Flush the whole mapped range so the CPU writes reach the device even if
    // the chosen memory type is not host-coherent.
    VkMappedMemoryRange flushRange;
    flushRange.sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
    flushRange.pNext = nullptr;
    flushRange.memory = fImageMemory;
    flushRange.offset = 0;
    flushRange.size = VK_WHOLE_SIZE;
    GR_VK_CALL_RESULT(vkGpu, result, FlushMappedMemoryRanges(vkGpu->device(), 1, &flushRange));
    if (result != VK_SUCCESS) {
        return false;
    }
    GR_VK_CALL(vkGpu->vkInterface(), UnmapMemory(vkGpu->device(), fImageMemory));

    // Bind image memory.
    GR_VK_CALL_RESULT(vkGpu, result, BindImageMemory(vkGpu->device(), fImage, fImageMemory, 0u));
    if (result != VK_SUCCESS) {
        return false;
    }

    // Wrap the image into SkImage. In debug builds, verify the device supports
    // linear-tiled sampling with YCbCr linear filtering and cosited chroma for
    // this format (isYCbCrSupported() checks the same flags at runtime).
    VkFormatProperties formatProperties;
    GR_VK_CALL(vkGpu->vkInterface(),
               GetPhysicalDeviceFormatProperties(vkGpu->physicalDevice(),
                                                 VK_FORMAT_G8_B8R8_2PLANE_420_UNORM,
                                                 &formatProperties));
    SkDEBUGCODE(auto linFlags = formatProperties.linearTilingFeatures;)
    SkASSERT((linFlags & VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT) &&
             (linFlags & VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT) &&
             (linFlags & VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT) &&
             (linFlags & VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT));

    // BT.709 narrow-range conversion with cosited chroma and linear filtering,
    // matching the feature bits asserted above.
    GrVkYcbcrConversionInfo ycbcrInfo = {vkImageInfo.format,
                                         /*externalFormat=*/0,
                                         VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709,
                                         VK_SAMPLER_YCBCR_RANGE_ITU_NARROW,
                                         VK_CHROMA_LOCATION_COSITED_EVEN,
                                         VK_CHROMA_LOCATION_COSITED_EVEN,
                                         VK_FILTER_LINEAR,
                                         false,
                                         formatProperties.linearTilingFeatures,
                                         /*fComponents=*/{}};
    skgpu::VulkanAlloc alloc;
    alloc.fMemory = fImageMemory;
    alloc.fOffset = 0;
    alloc.fSize = requirements.size;

    GrVkImageInfo imageInfo = {fImage,
                               alloc,
                               VK_IMAGE_TILING_LINEAR,
                               VK_IMAGE_LAYOUT_UNDEFINED,
                               vkImageInfo.format,
                               vkImageInfo.usage,
                               1 /* sample count */,
                               1 /* levelCount */,
                               VK_QUEUE_FAMILY_IGNORED,
                               GrProtected::kNo,
                               ycbcrInfo};

    fGrTexture = GrBackendTextures::MakeVk(width, height, imageInfo);
    return true;
}
368
vkGpu()369 GrVkGpu* VkYcbcrSamplerHelper::vkGpu() {
370 return (GrVkGpu*) fDContext->priv().getGpu();
371 }
372
// Ganesh-flavored constructor: Vulkan calls made by this helper are routed
// through the GrVkGpu owned by dContext, which must be Vulkan-backed (checked
// even in release builds, since everything else here assumes it).
VkYcbcrSamplerHelper::VkYcbcrSamplerHelper(GrDirectContext* dContext) : fDContext(dContext) {
    SkASSERT_RELEASE(dContext->backend() == GrBackendApi::kVulkan);
}
376
// Destroys the Vulkan image and frees its backing memory through whichever
// backend this helper was constructed for: the Graphite shared context when
// fSharedCtxt is set, otherwise the Ganesh GrVkGpu.
VkYcbcrSamplerHelper::~VkYcbcrSamplerHelper() {
#ifdef SK_GRAPHITE
    // fSharedCtxt is only non-null for the Graphite constructor path.
    if (fSharedCtxt) {
        if (fImage != VK_NULL_HANDLE) {
            VULKAN_CALL(fSharedCtxt->interface(),
                        DestroyImage(fSharedCtxt->device(), fImage, nullptr));
            fImage = VK_NULL_HANDLE;
        }
        if (fImageMemory != VK_NULL_HANDLE) {
            VULKAN_CALL(fSharedCtxt->interface(),
                        FreeMemory(fSharedCtxt->device(), fImageMemory, nullptr));
            fImageMemory = VK_NULL_HANDLE;
        }
    } else
#endif // SK_GRAPHITE
    {
        GrVkGpu* vkGpu = this->vkGpu();

        if (fImage != VK_NULL_HANDLE) {
            GR_VK_CALL(vkGpu->vkInterface(), DestroyImage(vkGpu->device(), fImage, nullptr));
            fImage = VK_NULL_HANDLE;
        }
        if (fImageMemory != VK_NULL_HANDLE) {
            GR_VK_CALL(vkGpu->vkInterface(), FreeMemory(vkGpu->device(), fImageMemory, nullptr));
            fImageMemory = VK_NULL_HANDLE;
        }
    }
}
405
isYCbCrSupported()406 bool VkYcbcrSamplerHelper::isYCbCrSupported() {
407 VkFormatProperties formatProperties;
408 #ifdef SK_GRAPHITE
409 if (fSharedCtxt) {
410 if (!fSharedCtxt->vulkanCaps().supportsYcbcrConversion()) {
411 return false;
412 }
413
414 SkASSERT(fPhysDev != VK_NULL_HANDLE);
415 VULKAN_CALL(fSharedCtxt->interface(),
416 GetPhysicalDeviceFormatProperties(fPhysDev,
417 VK_FORMAT_G8_B8R8_2PLANE_420_UNORM,
418 &formatProperties));
419 } else
420 #endif
421 {
422 GrVkGpu* vkGpu = this->vkGpu();
423 if (!vkGpu->vkCaps().supportsYcbcrConversion()) {
424 return false;
425 }
426
427 GR_VK_CALL(vkGpu->vkInterface(),
428 GetPhysicalDeviceFormatProperties(vkGpu->physicalDevice(),
429 VK_FORMAT_G8_B8R8_2PLANE_420_UNORM,
430 &formatProperties));
431 }
432
433 // The createBackendTexture call (which is the point of this helper class) requires linear
434 // support for VK_FORMAT_G8_B8R8_2PLANE_420_UNORM including sampling and cosited chroma.
435 // Verify that the image format is supported.
436 auto linFlags = formatProperties.linearTilingFeatures;
437 if (!(linFlags & VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT) ||
438 !(linFlags & VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT) ||
439 !(linFlags & VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT) ||
440 !(linFlags & VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT)) {
441 // VK_FORMAT_G8_B8R8_2PLANE_420_UNORM is not supported
442 return false;
443 }
444 return true;
445 }
446 #endif // SK_VULKAN
447