/*
 * Copyright 2020 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "tools/gpu/vk/VkYcbcrSamplerHelper.h"

#ifdef SK_VULKAN

#include "include/gpu/GrDirectContext.h"
#include "src/gpu/GrDirectContextPriv.h"
#include "src/gpu/vk/GrVkGpu.h"
#include "src/gpu/vk/GrVkUtil.h"
GetExpectedY(int x,int y,int width,int height)17 int VkYcbcrSamplerHelper::GetExpectedY(int x, int y, int width, int height) {
18 return 16 + (x + y) * 219 / (width + height - 2);
19 }
20
GetExpectedUV(int x,int y,int width,int height)21 std::pair<int, int> VkYcbcrSamplerHelper::GetExpectedUV(int x, int y, int width, int height) {
22 return { 16 + x * 224 / (width - 1), 16 + y * 224 / (height - 1) };
23 }
24
vkGpu()25 GrVkGpu* VkYcbcrSamplerHelper::vkGpu() {
26 return (GrVkGpu*) fDContext->priv().getGpu();
27 }
28
VkYcbcrSamplerHelper(GrDirectContext * dContext)29 VkYcbcrSamplerHelper::VkYcbcrSamplerHelper(GrDirectContext* dContext) : fDContext(dContext) {
30 SkASSERT_RELEASE(dContext->backend() == GrBackendApi::kVulkan);
31 }
32
~VkYcbcrSamplerHelper()33 VkYcbcrSamplerHelper::~VkYcbcrSamplerHelper() {
34 GrVkGpu* vkGpu = this->vkGpu();
35
36 if (fImage != VK_NULL_HANDLE) {
37 GR_VK_CALL(vkGpu->vkInterface(), DestroyImage(vkGpu->device(), fImage, nullptr));
38 fImage = VK_NULL_HANDLE;
39 }
40 if (fImageMemory != VK_NULL_HANDLE) {
41 GR_VK_CALL(vkGpu->vkInterface(), FreeMemory(vkGpu->device(), fImageMemory, nullptr));
42 fImageMemory = VK_NULL_HANDLE;
43 }
44 }
45
isYCbCrSupported()46 bool VkYcbcrSamplerHelper::isYCbCrSupported() {
47 GrVkGpu* vkGpu = this->vkGpu();
48
49 return vkGpu->vkCaps().supportsYcbcrConversion();
50 }
51
// Creates a VK_FORMAT_G8_B8R8_2PLANE_420_UNORM (two-plane 4:2:0) image of the
// given size in host-visible memory, fills its Y and interleaved UV planes
// with the ramps from GetExpectedY()/GetExpectedUV(), and wraps it in fTexture
// as a GrBackendTexture carrying the matching YCbCr conversion info.
// Returns false if the format is unsupported, no suitable memory type exists,
// or any Vulkan call fails. Any handles created before a failure are cleaned
// up by ~VkYcbcrSamplerHelper().
// NOTE(review): the UV fill iterates width/2 x height/2, so this appears to
// assume even dimensions — confirm callers only pass even width/height.
bool VkYcbcrSamplerHelper::createBackendTexture(uint32_t width, uint32_t height) {
    GrVkGpu* vkGpu = this->vkGpu();
    VkResult result;

    // Verify that the image format is supported for sampling with linear
    // tiling (the image below is created VK_IMAGE_TILING_LINEAR so the CPU
    // can write the planes directly).
    VkFormatProperties formatProperties;
    GR_VK_CALL(vkGpu->vkInterface(),
               GetPhysicalDeviceFormatProperties(vkGpu->physicalDevice(),
                                                 VK_FORMAT_G8_B8R8_2PLANE_420_UNORM,
                                                 &formatProperties));
    if (!(formatProperties.linearTilingFeatures & VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT)) {
        // VK_FORMAT_G8_B8R8_2PLANE_420_UNORM is not supported
        return false;
    }

    // Create YCbCr image.
    VkImageCreateInfo vkImageInfo = {};
    vkImageInfo.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
    vkImageInfo.imageType = VK_IMAGE_TYPE_2D;
    vkImageInfo.format = VK_FORMAT_G8_B8R8_2PLANE_420_UNORM;
    vkImageInfo.extent = VkExtent3D{width, height, 1};
    vkImageInfo.mipLevels = 1;
    vkImageInfo.arrayLayers = 1;
    vkImageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
    // Linear tiling so the subresource layouts below describe a CPU-writable
    // row-pitch layout.
    vkImageInfo.tiling = VK_IMAGE_TILING_LINEAR;
    vkImageInfo.usage = VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT |
                        VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
    vkImageInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
    vkImageInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;

    SkASSERT(fImage == VK_NULL_HANDLE);
    GR_VK_CALL_RESULT(vkGpu, result, CreateImage(vkGpu->device(), &vkImageInfo, nullptr, &fImage));
    if (result != VK_SUCCESS) {
        return false;
    }

    // Query size/alignment/memory-type requirements for the whole image
    // (both planes share one allocation; the image is not created disjoint).
    VkMemoryRequirements requirements;
    GR_VK_CALL(vkGpu->vkInterface(), GetImageMemoryRequirements(vkGpu->device(),
                                                                fImage,
                                                                &requirements));

    // Pick the first memory type that is both acceptable for the image
    // (bit i set in memoryTypeBits) and host-visible, so it can be mapped
    // and written from the CPU.
    uint32_t memoryTypeIndex = 0;
    bool foundHeap = false;
    VkPhysicalDeviceMemoryProperties phyDevMemProps;
    GR_VK_CALL(vkGpu->vkInterface(), GetPhysicalDeviceMemoryProperties(vkGpu->physicalDevice(),
                                                                       &phyDevMemProps));
    for (uint32_t i = 0; i < phyDevMemProps.memoryTypeCount && !foundHeap; ++i) {
        if (requirements.memoryTypeBits & (1 << i)) {
            // Map host-visible memory.
            if (phyDevMemProps.memoryTypes[i].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) {
                memoryTypeIndex = i;
                foundHeap = true;
            }
        }
    }
    if (!foundHeap) {
        return false;
    }

    VkMemoryAllocateInfo allocInfo = {};
    allocInfo.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
    allocInfo.allocationSize = requirements.size;
    allocInfo.memoryTypeIndex = memoryTypeIndex;

    SkASSERT(fImageMemory == VK_NULL_HANDLE);
    GR_VK_CALL_RESULT(vkGpu, result, AllocateMemory(vkGpu->device(), &allocInfo,
                                                    nullptr, &fImageMemory));
    if (result != VK_SUCCESS) {
        return false;
    }

    // Map the allocation and write the plane data before binding it to the
    // image; the subresource layouts queried below are valid for a linear
    // image independent of the binding.
    void* mappedBuffer;
    GR_VK_CALL_RESULT(vkGpu, result, MapMemory(vkGpu->device(), fImageMemory, 0u,
                                               requirements.size, 0u, &mappedBuffer));
    if (result != VK_SUCCESS) {
        return false;
    }

    // Write Y channel.
    VkImageSubresource subresource;
    subresource.aspectMask = VK_IMAGE_ASPECT_PLANE_0_BIT;
    subresource.mipLevel = 0;
    subresource.arrayLayer = 0;

    VkSubresourceLayout yLayout;
    GR_VK_CALL(vkGpu->vkInterface(), GetImageSubresourceLayout(vkGpu->device(), fImage,
                                                               &subresource, &yLayout));
    uint8_t* bufferData = reinterpret_cast<uint8_t*>(mappedBuffer) + yLayout.offset;
    for (size_t y = 0; y < height; ++y) {
        for (size_t x = 0; x < width; ++x) {
            bufferData[y * yLayout.rowPitch + x] = GetExpectedY(x, y, width, height);
        }
    }

    // Write UV channels. Plane 1 of a ..._2PLANE_420 format holds interleaved
    // U,V pairs at half resolution in each dimension, hence the /2 loops and
    // the 2*x / 2*y sample positions.
    subresource.aspectMask = VK_IMAGE_ASPECT_PLANE_1_BIT;
    VkSubresourceLayout uvLayout;
    GR_VK_CALL(vkGpu->vkInterface(), GetImageSubresourceLayout(vkGpu->device(), fImage,
                                                               &subresource, &uvLayout));
    bufferData = reinterpret_cast<uint8_t*>(mappedBuffer) + uvLayout.offset;
    for (size_t y = 0; y < height / 2; ++y) {
        for (size_t x = 0; x < width / 2; ++x) {
            auto [u, v] = GetExpectedUV(2*x, 2*y, width, height);
            bufferData[y * uvLayout.rowPitch + x * 2] = u;
            bufferData[y * uvLayout.rowPitch + x * 2 + 1] = v;
        }
    }

    // Flush the whole range so the writes are visible even when the chosen
    // memory type is not host-coherent (only HOST_VISIBLE was required above).
    VkMappedMemoryRange flushRange;
    flushRange.sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
    flushRange.pNext = nullptr;
    flushRange.memory = fImageMemory;
    flushRange.offset = 0;
    flushRange.size = VK_WHOLE_SIZE;
    GR_VK_CALL_RESULT(vkGpu, result, FlushMappedMemoryRanges(vkGpu->device(), 1, &flushRange));
    if (result != VK_SUCCESS) {
        return false;
    }
    GR_VK_CALL(vkGpu->vkInterface(), UnmapMemory(vkGpu->device(), fImageMemory));

    // Bind image memory.
    GR_VK_CALL_RESULT(vkGpu, result, BindImageMemory(vkGpu->device(), fImage, fImageMemory, 0u));
    if (result != VK_SUCCESS) {
        return false;
    }

    // Wrap the image into SkImage.
    // BT.709 matrix, narrow range, cosited-even chroma siting to match the
    // ramps written above; externalFormat 0 because a known VkFormat is used.
    GrVkYcbcrConversionInfo ycbcrInfo = {vkImageInfo.format,
                                         /*externalFormat=*/0,
                                         VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709,
                                         VK_SAMPLER_YCBCR_RANGE_ITU_NARROW,
                                         VK_CHROMA_LOCATION_COSITED_EVEN,
                                         VK_CHROMA_LOCATION_COSITED_EVEN,
                                         VK_FILTER_LINEAR,
                                         false,
                                         formatProperties.linearTilingFeatures};
    GrVkAlloc alloc;
    alloc.fMemory = fImageMemory;
    alloc.fOffset = 0;
    alloc.fSize = requirements.size;

    GrVkImageInfo imageInfo = {fImage,
                               alloc,
                               VK_IMAGE_TILING_LINEAR,
                               VK_IMAGE_LAYOUT_UNDEFINED,
                               vkImageInfo.format,
                               vkImageInfo.usage,
                               1 /* sample count */,
                               1 /* levelCount */,
                               VK_QUEUE_FAMILY_IGNORED,
                               GrProtected::kNo,
                               ycbcrInfo};

    fTexture = GrBackendTexture(width, height, imageInfo);
    return true;
}

#endif // SK_VULKAN