/*
 * Copyright © 2022 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */

#include "vk_android.h"

#include "vk_common_entrypoints.h"
#include "vk_device.h"
#include "vk_image.h"
#include "vk_log.h"
#include "vk_queue.h"
#include "vk_util.h"

#include "util/libsync.h"

#include <hardware/gralloc.h>

#if ANDROID_API_LEVEL >= 26
#include <hardware/gralloc1.h>
#endif

#include <errno.h>
#include <unistd.h>

#if ANDROID_API_LEVEL >= 26
#include <vndk/hardware_buffer.h>

/* From the Android hardware_buffer.h header:
 *
 *    "The buffer will be written to by the GPU as a framebuffer attachment.
 *
 *    Note that the name of this flag is somewhat misleading: it does not
 *    imply that the buffer contains a color format. A buffer with depth or
 *    stencil format that will be used as a framebuffer attachment should
 *    also have this flag. Use the equivalent flag
 *    AHARDWAREBUFFER_USAGE_GPU_FRAMEBUFFER to avoid this confusion."
 *
 * The flag was renamed from COLOR_OUTPUT to FRAMEBUFFER at Android API
 * version 29.
 */
#if ANDROID_API_LEVEL < 29
#define AHARDWAREBUFFER_USAGE_GPU_FRAMEBUFFER AHARDWAREBUFFER_USAGE_GPU_COLOR_OUTPUT
#endif

/* Convert an AHB format to a VkFormat, based on the "AHardwareBuffer Format
 * Equivalence" table in the Vulkan spec.
 *
 * Note that this only covers a subset of the AHB formats defined in the NDK.
 * Drivers can support more AHB formats, including private ones.
 */
VkFormat
vk_ahb_format_to_image_format(uint32_t ahb_format)
{
   switch (ahb_format) {
   case AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM:
   case AHARDWAREBUFFER_FORMAT_R8G8B8X8_UNORM:
      return VK_FORMAT_R8G8B8A8_UNORM;
   case AHARDWAREBUFFER_FORMAT_R8G8B8_UNORM:
      return VK_FORMAT_R8G8B8_UNORM;
   case AHARDWAREBUFFER_FORMAT_R5G6B5_UNORM:
      return VK_FORMAT_R5G6B5_UNORM_PACK16;
   case AHARDWAREBUFFER_FORMAT_R16G16B16A16_FLOAT:
      return VK_FORMAT_R16G16B16A16_SFLOAT;
   case AHARDWAREBUFFER_FORMAT_R10G10B10A2_UNORM:
      return VK_FORMAT_A2B10G10R10_UNORM_PACK32;
   case AHARDWAREBUFFER_FORMAT_D16_UNORM:
      return VK_FORMAT_D16_UNORM;
   case AHARDWAREBUFFER_FORMAT_D24_UNORM:
      return VK_FORMAT_X8_D24_UNORM_PACK32;
   case AHARDWAREBUFFER_FORMAT_D24_UNORM_S8_UINT:
      return VK_FORMAT_D24_UNORM_S8_UINT;
   case AHARDWAREBUFFER_FORMAT_D32_FLOAT:
      return VK_FORMAT_D32_SFLOAT;
   case AHARDWAREBUFFER_FORMAT_D32_FLOAT_S8_UINT:
      return VK_FORMAT_D32_SFLOAT_S8_UINT;
   case AHARDWAREBUFFER_FORMAT_S8_UINT:
      return VK_FORMAT_S8_UINT;
   default:
      return VK_FORMAT_UNDEFINED;
   }
}

/* Convert a VkFormat to an AHB format, based on the "AHardwareBuffer Format
 * Equivalence" table in the Vulkan spec.
 *
 * Note that this only covers a subset of the AHB formats defined in the NDK.
 * Drivers can support more AHB formats, including private ones.
 */
uint32_t
vk_image_format_to_ahb_format(VkFormat vk_format)
{
   switch (vk_format) {
   case VK_FORMAT_R8G8B8A8_UNORM:
      return AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM;
   case VK_FORMAT_R8G8B8_UNORM:
      return AHARDWAREBUFFER_FORMAT_R8G8B8_UNORM;
   case VK_FORMAT_R5G6B5_UNORM_PACK16:
      return AHARDWAREBUFFER_FORMAT_R5G6B5_UNORM;
   case VK_FORMAT_R16G16B16A16_SFLOAT:
      return AHARDWAREBUFFER_FORMAT_R16G16B16A16_FLOAT;
   case VK_FORMAT_A2B10G10R10_UNORM_PACK32:
      return AHARDWAREBUFFER_FORMAT_R10G10B10A2_UNORM;
   case VK_FORMAT_D16_UNORM:
      return AHARDWAREBUFFER_FORMAT_D16_UNORM;
   case VK_FORMAT_X8_D24_UNORM_PACK32:
      return AHARDWAREBUFFER_FORMAT_D24_UNORM;
   case VK_FORMAT_D24_UNORM_S8_UINT:
      return AHARDWAREBUFFER_FORMAT_D24_UNORM_S8_UINT;
   case VK_FORMAT_D32_SFLOAT:
      return AHARDWAREBUFFER_FORMAT_D32_FLOAT;
   case VK_FORMAT_D32_SFLOAT_S8_UINT:
      return AHARDWAREBUFFER_FORMAT_D32_FLOAT_S8_UINT;
   case VK_FORMAT_S8_UINT:
      return AHARDWAREBUFFER_FORMAT_S8_UINT;
   default:
      return 0;
   }
}

/* Construct an AHB usage mask from image create flags and usage bits; see the
 * "AHardwareBuffer Usage Equivalence" table in the Vulkan spec.
 */
uint64_t
vk_image_usage_to_ahb_usage(const VkImageCreateFlags vk_create,
                            const VkImageUsageFlags vk_usage)
{
   uint64_t ahb_usage = 0;
   if (vk_usage & (VK_IMAGE_USAGE_SAMPLED_BIT |
                   VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT))
      ahb_usage |= AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE;

   if (vk_usage & (VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT |
                   VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT))
      ahb_usage |= AHARDWAREBUFFER_USAGE_GPU_FRAMEBUFFER;

   if (vk_usage & VK_IMAGE_USAGE_STORAGE_BIT)
      ahb_usage |= AHARDWAREBUFFER_USAGE_GPU_DATA_BUFFER;

   if (vk_create & VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT)
      ahb_usage |= AHARDWAREBUFFER_USAGE_GPU_CUBE_MAP;

   if (vk_create & VK_IMAGE_CREATE_PROTECTED_BIT)
      ahb_usage |= AHARDWAREBUFFER_USAGE_PROTECTED_CONTENT;

   /* No usage bits set - set at least one GPU usage. */
   if (ahb_usage == 0)
      ahb_usage = AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE;

   return ahb_usage;
}

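/* Allocate an AHardwareBuffer backing an exportable VkDeviceMemory
 * allocation. For a dedicated image allocation, the AHB is sized and
 * formatted to match the image and its usage; otherwise a BLOB buffer of
 * allocationSize bytes is allocated for buffer exports. Returns NULL on
 * failure.
 */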
struct AHardwareBuffer *
vk_alloc_ahardware_buffer(const VkMemoryAllocateInfo *pAllocateInfo)
{
   const VkMemoryDedicatedAllocateInfo *dedicated_info =
      vk_find_struct_const(pAllocateInfo->pNext,
                           MEMORY_DEDICATED_ALLOCATE_INFO);

   uint32_t w = 0;
   uint32_t h = 1;
   uint32_t layers = 1;
   uint32_t format = 0;
   uint64_t usage = 0;

   /* If the caller passed dedicated allocation information. */
   if (dedicated_info && dedicated_info->image) {
      VK_FROM_HANDLE(vk_image, image, dedicated_info->image);

      if (!image->ahb_format)
         return NULL;

      w = image->extent.width;
      h = image->extent.height;
      layers = image->array_layers;
      format = image->ahb_format;
      usage = vk_image_usage_to_ahb_usage(image->create_flags,
                                          image->usage);
   } else {
      /* AHB export allocation for a VkBuffer requires a valid allocationSize. */
      assert(pAllocateInfo->allocationSize);
      w = pAllocateInfo->allocationSize;
      format = AHARDWAREBUFFER_FORMAT_BLOB;
      usage = AHARDWAREBUFFER_USAGE_GPU_DATA_BUFFER |
              AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN |
              AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN;
   }

   struct AHardwareBuffer_Desc desc = {
      .width = w,
      .height = h,
      .layers = layers,
      .format = format,
      .usage = usage,
   };

   struct AHardwareBuffer *ahb;
   if (AHardwareBuffer_allocate(&desc, &ahb) != 0)
      return NULL;

   return ahb;
}
#endif /* ANDROID_API_LEVEL >= 26 */

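/* Common implementation of vkAcquireImageANDROID from
 * VK_ANDROID_native_buffer: imports the incoming native fence fd into the
 * provided semaphore and/or fence. Ownership of nativeFenceFd is always
 * taken, even on failure.
 */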
VKAPI_ATTR VkResult VKAPI_CALL
vk_common_AcquireImageANDROID(VkDevice _device,
                              VkImage image,
                              int nativeFenceFd,
                              VkSemaphore semaphore,
                              VkFence fence)
{
   VK_FROM_HANDLE(vk_device, device, _device);
   VkResult result = VK_SUCCESS;

   /* From https://source.android.com/devices/graphics/implement-vulkan :
    *
    *    "The driver takes ownership of the fence file descriptor and closes
    *    the fence file descriptor when no longer needed. The driver must do
    *    so even if neither a semaphore or fence object is provided, or even
    *    if vkAcquireImageANDROID fails and returns an error."
    *
    * The Vulkan spec for VkImportFence/SemaphoreFdKHR(), however, requires
    * the file descriptor to be left alone on failure.
    */
   int semaphore_fd = -1, fence_fd = -1;
   if (nativeFenceFd >= 0) {
      if (semaphore != VK_NULL_HANDLE && fence != VK_NULL_HANDLE) {
         /* We have both so we have to import the sync file twice. One of
          * them needs to be a dup.
          */
         semaphore_fd = nativeFenceFd;
         fence_fd = dup(nativeFenceFd);
         if (fence_fd < 0) {
            VkResult err = (errno == EMFILE) ? VK_ERROR_TOO_MANY_OBJECTS :
                                               VK_ERROR_OUT_OF_HOST_MEMORY;
            close(nativeFenceFd);
            return vk_error(device, err);
         }
      } else if (semaphore != VK_NULL_HANDLE) {
         semaphore_fd = nativeFenceFd;
      } else if (fence != VK_NULL_HANDLE) {
         fence_fd = nativeFenceFd;
      } else {
         /* Nothing to import into so we have to close the file */
         close(nativeFenceFd);
      }
   }

   if (semaphore != VK_NULL_HANDLE) {
      const VkImportSemaphoreFdInfoKHR info = {
         .sType = VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR,
         .semaphore = semaphore,
         .flags = VK_SEMAPHORE_IMPORT_TEMPORARY_BIT,
         .handleType = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT,
         .fd = semaphore_fd,
      };
      result = device->dispatch_table.ImportSemaphoreFdKHR(_device, &info);
      if (result == VK_SUCCESS)
         semaphore_fd = -1; /* The driver took ownership */
   }

   if (result == VK_SUCCESS && fence != VK_NULL_HANDLE) {
      const VkImportFenceFdInfoKHR info = {
         .sType = VK_STRUCTURE_TYPE_IMPORT_FENCE_FD_INFO_KHR,
         .fence = fence,
         .flags = VK_FENCE_IMPORT_TEMPORARY_BIT,
         .handleType = VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT,
         .fd = fence_fd,
      };
      result = device->dispatch_table.ImportFenceFdKHR(_device, &info);
      if (result == VK_SUCCESS)
         fence_fd = -1; /* The driver took ownership */
   }

   if (semaphore_fd >= 0)
      close(semaphore_fd);
   if (fence_fd >= 0)
      close(fence_fd);

   return result;
}

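/* Lazily create the queue's VK_ANDROID_native_buffer release semaphore: a
 * semaphore exportable as a sync fd, created on first use and reused for the
 * lifetime of the queue.
 */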
static VkResult
vk_anb_semaphore_init_once(struct vk_queue *queue, struct vk_device *device)
{
   if (queue->anb_semaphore != VK_NULL_HANDLE)
      return VK_SUCCESS;

   const VkExportSemaphoreCreateInfo export_info = {
      .sType = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO,
      .handleTypes = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT,
   };
   const VkSemaphoreCreateInfo create_info = {
      .sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO,
      .pNext = &export_info,
   };
   return device->dispatch_table.CreateSemaphore(vk_device_to_handle(device),
                                                 &create_info, NULL,
                                                 &queue->anb_semaphore);
}

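/* Common implementation of vkQueueSignalReleaseImageANDROID from
 * VK_ANDROID_native_buffer: submits a wait on pWaitSemaphores that signals
 * the queue's exportable semaphore, then exports that semaphore as the sync
 * fd returned in *pNativeFenceFd for the Android presentation engine.
 */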
VKAPI_ATTR VkResult VKAPI_CALL
vk_common_QueueSignalReleaseImageANDROID(VkQueue _queue,
                                         uint32_t waitSemaphoreCount,
                                         const VkSemaphore *pWaitSemaphores,
                                         VkImage image,
                                         int *pNativeFenceFd)
{
   VK_FROM_HANDLE(vk_queue, queue, _queue);
   struct vk_device *device = queue->base.device;
   VkResult result = VK_SUCCESS;

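   /* Allocate at least one entry so the stage-mask array is never zero-sized
    * when there are no wait semaphores.
    */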
   STACK_ARRAY(VkPipelineStageFlags, stage_flags, MAX2(1, waitSemaphoreCount));
   for (uint32_t i = 0; i < MAX2(1, waitSemaphoreCount); i++)
      stage_flags[i] = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT;

   result = vk_anb_semaphore_init_once(queue, device);
   if (result != VK_SUCCESS) {
      STACK_ARRAY_FINISH(stage_flags);
      return result;
   }

   const VkSubmitInfo submit_info = {
      .sType = VK_STRUCTURE_TYPE_SUBMIT_INFO,
      .waitSemaphoreCount = waitSemaphoreCount,
      .pWaitSemaphores = pWaitSemaphores,
      .pWaitDstStageMask = stage_flags,
      .signalSemaphoreCount = 1,
      .pSignalSemaphores = &queue->anb_semaphore,
   };
   result = device->dispatch_table.QueueSubmit(_queue, 1, &submit_info,
                                               VK_NULL_HANDLE);
   STACK_ARRAY_FINISH(stage_flags);
   if (result != VK_SUCCESS)
      return result;

   const VkSemaphoreGetFdInfoKHR get_fd = {
      .sType = VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR,
      .handleType = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT,
      .semaphore = queue->anb_semaphore,
   };
   return device->dispatch_table.GetSemaphoreFdKHR(vk_device_to_handle(device),
                                                   &get_fd, pNativeFenceFd);
}