/*
 * Copyright 2020 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

8 #include "tools/gpu/vk/VkYcbcrSamplerHelper.h"
9
10 #ifdef SK_VULKAN
11
12 #include "include/gpu/GrDirectContext.h"
13 #include "src/gpu/ganesh/GrDirectContextPriv.h"
14 #include "src/gpu/ganesh/vk/GrVkGpu.h"
15 #include "src/gpu/ganesh/vk/GrVkUtil.h"
16
GetExpectedY(int x,int y,int width,int height)17 int VkYcbcrSamplerHelper::GetExpectedY(int x, int y, int width, int height) {
18 return 16 + (x + y) * 219 / (width + height - 2);
19 }
20
GetExpectedUV(int x,int y,int width,int height)21 std::pair<int, int> VkYcbcrSamplerHelper::GetExpectedUV(int x, int y, int width, int height) {
22 return { 16 + x * 224 / (width - 1), 16 + y * 224 / (height - 1) };
23 }
24
vkGpu()25 GrVkGpu* VkYcbcrSamplerHelper::vkGpu() {
26 return (GrVkGpu*) fDContext->priv().getGpu();
27 }
28
VkYcbcrSamplerHelper(GrDirectContext * dContext)29 VkYcbcrSamplerHelper::VkYcbcrSamplerHelper(GrDirectContext* dContext) : fDContext(dContext) {
30 SkASSERT_RELEASE(dContext->backend() == GrBackendApi::kVulkan);
31 }
32
~VkYcbcrSamplerHelper()33 VkYcbcrSamplerHelper::~VkYcbcrSamplerHelper() {
34 GrVkGpu* vkGpu = this->vkGpu();
35
36 if (fImage != VK_NULL_HANDLE) {
37 GR_VK_CALL(vkGpu->vkInterface(), DestroyImage(vkGpu->device(), fImage, nullptr));
38 fImage = VK_NULL_HANDLE;
39 }
40 if (fImageMemory != VK_NULL_HANDLE) {
41 GR_VK_CALL(vkGpu->vkInterface(), FreeMemory(vkGpu->device(), fImageMemory, nullptr));
42 fImageMemory = VK_NULL_HANDLE;
43 }
44 }
45
isYCbCrSupported()46 bool VkYcbcrSamplerHelper::isYCbCrSupported() {
47 GrVkGpu* vkGpu = this->vkGpu();
48
49 if (!vkGpu->vkCaps().supportsYcbcrConversion()) {
50 return false;
51 }
52
53 // The createBackendTexture call (which is the point of this helper class) requires linear
54 // support for VK_FORMAT_G8_B8R8_2PLANE_420_UNORM including sampling and cosited chroma.
55 // Verify that the image format is supported.
56 VkFormatProperties formatProperties;
57 GR_VK_CALL(vkGpu->vkInterface(),
58 GetPhysicalDeviceFormatProperties(vkGpu->physicalDevice(),
59 VK_FORMAT_G8_B8R8_2PLANE_420_UNORM,
60 &formatProperties));
61 auto linFlags = formatProperties.linearTilingFeatures;
62 if (!(linFlags & VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT) ||
63 !(linFlags & VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT) ||
64 !(linFlags & VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT) ||
65 !(linFlags & VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT)) {
66 // VK_FORMAT_G8_B8R8_2PLANE_420_UNORM is not supported
67 return false;
68 }
69
70 return true;
71 }
72
// Creates a width x height VK_FORMAT_G8_B8R8_2PLANE_420_UNORM image, fills
// its Y and UV planes from the CPU with the GetExpectedY()/GetExpectedUV()
// ramps, and wraps it in fTexture as a GrBackendTexture with a matching
// BT.709 narrow-range YCbCr conversion. Returns false on any Vulkan failure;
// partially-created handles are cleaned up by the destructor.
// NOTE(review): assumes width and height are even (4:2:0 subsampling) —
// confirm at call sites.
bool VkYcbcrSamplerHelper::createBackendTexture(uint32_t width, uint32_t height) {
    GrVkGpu* vkGpu = this->vkGpu();
    VkResult result;

    // Create YCbCr image. Linear tiling + host-visible memory (below) allow
    // the planes to be written directly through vkMapMemory.
    VkImageCreateInfo vkImageInfo = {};
    vkImageInfo.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
    vkImageInfo.imageType = VK_IMAGE_TYPE_2D;
    vkImageInfo.format = VK_FORMAT_G8_B8R8_2PLANE_420_UNORM;
    vkImageInfo.extent = VkExtent3D{width, height, 1};
    vkImageInfo.mipLevels = 1;
    vkImageInfo.arrayLayers = 1;
    vkImageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
    vkImageInfo.tiling = VK_IMAGE_TILING_LINEAR;
    vkImageInfo.usage = VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT |
                        VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
    vkImageInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
    vkImageInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;

    SkASSERT(fImage == VK_NULL_HANDLE);
    GR_VK_CALL_RESULT(vkGpu, result, CreateImage(vkGpu->device(), &vkImageInfo, nullptr, &fImage));
    if (result != VK_SUCCESS) {
        return false;
    }

    VkMemoryRequirements requirements;
    GR_VK_CALL(vkGpu->vkInterface(), GetImageMemoryRequirements(vkGpu->device(),
                                                                fImage,
                                                                &requirements));

    // Pick the first memory type the image accepts that is host-visible, so
    // the plane data can be written from the CPU.
    uint32_t memoryTypeIndex = 0;
    bool foundHeap = false;
    VkPhysicalDeviceMemoryProperties phyDevMemProps;
    GR_VK_CALL(vkGpu->vkInterface(), GetPhysicalDeviceMemoryProperties(vkGpu->physicalDevice(),
                                                                      &phyDevMemProps));
    for (uint32_t i = 0; i < phyDevMemProps.memoryTypeCount && !foundHeap; ++i) {
        if (requirements.memoryTypeBits & (1 << i)) {
            // Map host-visible memory.
            if (phyDevMemProps.memoryTypes[i].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) {
                memoryTypeIndex = i;
                foundHeap = true;
            }
        }
    }
    if (!foundHeap) {
        return false;
    }

    VkMemoryAllocateInfo allocInfo = {};
    allocInfo.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
    allocInfo.allocationSize = requirements.size;
    allocInfo.memoryTypeIndex = memoryTypeIndex;

    SkASSERT(fImageMemory == VK_NULL_HANDLE);
    GR_VK_CALL_RESULT(vkGpu, result, AllocateMemory(vkGpu->device(), &allocInfo,
                                                    nullptr, &fImageMemory));
    if (result != VK_SUCCESS) {
        return false;
    }

    // Map the whole allocation; the per-plane offsets come from
    // vkGetImageSubresourceLayout below.
    void* mappedBuffer;
    GR_VK_CALL_RESULT(vkGpu, result, MapMemory(vkGpu->device(), fImageMemory, 0u,
                                               requirements.size, 0u, &mappedBuffer));
    if (result != VK_SUCCESS) {
        return false;
    }

    // Write Y channel (plane 0): one byte per pixel, rows padded to rowPitch.
    VkImageSubresource subresource;
    subresource.aspectMask = VK_IMAGE_ASPECT_PLANE_0_BIT;
    subresource.mipLevel = 0;
    subresource.arrayLayer = 0;

    VkSubresourceLayout yLayout;
    GR_VK_CALL(vkGpu->vkInterface(), GetImageSubresourceLayout(vkGpu->device(), fImage,
                                                               &subresource, &yLayout));
    uint8_t* bufferData = reinterpret_cast<uint8_t*>(mappedBuffer) + yLayout.offset;
    for (size_t y = 0; y < height; ++y) {
        for (size_t x = 0; x < width; ++x) {
            bufferData[y * yLayout.rowPitch + x] = GetExpectedY(x, y, width, height);
        }
    }

    // Write UV channels (plane 1): interleaved U,V at half resolution in both
    // dimensions (4:2:0), so each sample covers the 2x2 pixel block at
    // (2*x, 2*y).
    subresource.aspectMask = VK_IMAGE_ASPECT_PLANE_1_BIT;
    VkSubresourceLayout uvLayout;
    GR_VK_CALL(vkGpu->vkInterface(), GetImageSubresourceLayout(vkGpu->device(), fImage,
                                                               &subresource, &uvLayout));
    bufferData = reinterpret_cast<uint8_t*>(mappedBuffer) + uvLayout.offset;
    for (size_t y = 0; y < height / 2; ++y) {
        for (size_t x = 0; x < width / 2; ++x) {
            auto [u, v] = GetExpectedUV(2*x, 2*y, width, height);
            bufferData[y * uvLayout.rowPitch + x * 2] = u;
            bufferData[y * uvLayout.rowPitch + x * 2 + 1] = v;
        }
    }

    // Flush unconditionally: required for non-coherent heaps, harmless for
    // coherent ones.
    VkMappedMemoryRange flushRange;
    flushRange.sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
    flushRange.pNext = nullptr;
    flushRange.memory = fImageMemory;
    flushRange.offset = 0;
    flushRange.size = VK_WHOLE_SIZE;
    GR_VK_CALL_RESULT(vkGpu, result, FlushMappedMemoryRanges(vkGpu->device(), 1, &flushRange));
    if (result != VK_SUCCESS) {
        return false;
    }
    GR_VK_CALL(vkGpu->vkInterface(), UnmapMemory(vkGpu->device(), fImageMemory));

    // Bind image memory.
    GR_VK_CALL_RESULT(vkGpu, result, BindImageMemory(vkGpu->device(), fImage, fImageMemory, 0u));
    if (result != VK_SUCCESS) {
        return false;
    }

    // Wrap the image in a GrBackendTexture. Debug-only re-check of the linear
    // format features that isYCbCrSupported() requires.
    VkFormatProperties formatProperties;
    GR_VK_CALL(vkGpu->vkInterface(),
               GetPhysicalDeviceFormatProperties(vkGpu->physicalDevice(),
                                                 VK_FORMAT_G8_B8R8_2PLANE_420_UNORM,
                                                 &formatProperties));
    SkDEBUGCODE(auto linFlags = formatProperties.linearTilingFeatures;)
    SkASSERT((linFlags & VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT) &&
             (linFlags & VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT) &&
             (linFlags & VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT) &&
             (linFlags & VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT));

    // BT.709 model, narrow (studio-swing) range, cosited-even chroma — this
    // must match the ramps written above for readback tests to agree.
    GrVkYcbcrConversionInfo ycbcrInfo = {vkImageInfo.format,
                                         /*externalFormat=*/0,
                                         VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709,
                                         VK_SAMPLER_YCBCR_RANGE_ITU_NARROW,
                                         VK_CHROMA_LOCATION_COSITED_EVEN,
                                         VK_CHROMA_LOCATION_COSITED_EVEN,
                                         VK_FILTER_LINEAR,
                                         false,
                                         formatProperties.linearTilingFeatures};
    skgpu::VulkanAlloc alloc;
    alloc.fMemory = fImageMemory;
    alloc.fOffset = 0;
    alloc.fSize = requirements.size;

    GrVkImageInfo imageInfo = {fImage,
                               alloc,
                               VK_IMAGE_TILING_LINEAR,
                               VK_IMAGE_LAYOUT_UNDEFINED,
                               vkImageInfo.format,
                               vkImageInfo.usage,
                               1 /* sample count */,
                               1 /* levelCount */,
                               VK_QUEUE_FAMILY_IGNORED,
                               GrProtected::kNo,
                               ycbcrInfo};

    fTexture = GrBackendTexture(width, height, imageInfo);
    return true;
}
229
230 #endif // SK_VULKAN
231