1 /*
2 * Copyright © 2022 Intel Corporation
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice (including the next
12 * paragraph) shall be included in all copies or substantial portions of the
13 * Software.
14 *
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
18 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
21 * IN THE SOFTWARE.
22 */
23
24 #include "vk_android.h"
25
26 #include "vk_buffer.h"
27 #include "vk_common_entrypoints.h"
28 #include "vk_device.h"
29 #include "vk_image.h"
30 #include "vk_log.h"
31 #include "vk_queue.h"
32 #include "vk_util.h"
33
34 #include "util/libsync.h"
35
36 #include <hardware/gralloc.h>
37
38 #if ANDROID_API_LEVEL >= 26
39 #include <hardware/gralloc1.h>
40 #endif
41
42 #include <unistd.h>
43
44 #if ANDROID_API_LEVEL >= 26
45 #include <vndk/hardware_buffer.h>
46
47 /* From the Android hardware_buffer.h header:
48 *
49 * "The buffer will be written to by the GPU as a framebuffer attachment.
50 *
51 * Note that the name of this flag is somewhat misleading: it does not
52 * imply that the buffer contains a color format. A buffer with depth or
53 * stencil format that will be used as a framebuffer attachment should
54 * also have this flag. Use the equivalent flag
55 * AHARDWAREBUFFER_USAGE_GPU_FRAMEBUFFER to avoid this confusion."
56 *
57 * The flag was renamed from COLOR_OUTPUT to FRAMEBUFFER at Android API
58 * version 29.
59 */
60 #if ANDROID_API_LEVEL < 29
61 #define AHARDWAREBUFFER_USAGE_GPU_FRAMEBUFFER AHARDWAREBUFFER_USAGE_GPU_COLOR_OUTPUT
62 #endif
63
64 /* Convert an AHB format to a VkFormat, based on the "AHardwareBuffer Format
65 * Equivalence" table in Vulkan spec.
66 *
67 * Note that this only covers a subset of AHB formats defined in NDK. Drivers
68 * can support more AHB formats, including private ones.
69 */
70 VkFormat
vk_ahb_format_to_image_format(uint32_t ahb_format)71 vk_ahb_format_to_image_format(uint32_t ahb_format)
72 {
73 switch (ahb_format) {
74 case AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM:
75 case AHARDWAREBUFFER_FORMAT_R8G8B8X8_UNORM:
76 return VK_FORMAT_R8G8B8A8_UNORM;
77 case AHARDWAREBUFFER_FORMAT_R8G8B8_UNORM:
78 return VK_FORMAT_R8G8B8_UNORM;
79 case AHARDWAREBUFFER_FORMAT_R5G6B5_UNORM:
80 return VK_FORMAT_R5G6B5_UNORM_PACK16;
81 case AHARDWAREBUFFER_FORMAT_R16G16B16A16_FLOAT:
82 return VK_FORMAT_R16G16B16A16_SFLOAT;
83 case AHARDWAREBUFFER_FORMAT_R10G10B10A2_UNORM:
84 return VK_FORMAT_A2B10G10R10_UNORM_PACK32;
85 case AHARDWAREBUFFER_FORMAT_D16_UNORM:
86 return VK_FORMAT_D16_UNORM;
87 case AHARDWAREBUFFER_FORMAT_D24_UNORM:
88 return VK_FORMAT_X8_D24_UNORM_PACK32;
89 case AHARDWAREBUFFER_FORMAT_D24_UNORM_S8_UINT:
90 return VK_FORMAT_D24_UNORM_S8_UINT;
91 case AHARDWAREBUFFER_FORMAT_D32_FLOAT:
92 return VK_FORMAT_D32_SFLOAT;
93 case AHARDWAREBUFFER_FORMAT_D32_FLOAT_S8_UINT:
94 return VK_FORMAT_D32_SFLOAT_S8_UINT;
95 case AHARDWAREBUFFER_FORMAT_S8_UINT:
96 return VK_FORMAT_S8_UINT;
97 case AHARDWAREBUFFER_FORMAT_R8_UNORM:
98 return VK_FORMAT_R8_UNORM;
99 default:
100 return VK_FORMAT_UNDEFINED;
101 }
102 }
103
104 /* Convert a VkFormat to an AHB format, based on the "AHardwareBuffer Format
105 * Equivalence" table in Vulkan spec.
106 *
107 * Note that this only covers a subset of AHB formats defined in NDK. Drivers
108 * can support more AHB formats, including private ones.
109 */
110 uint32_t
vk_image_format_to_ahb_format(VkFormat vk_format)111 vk_image_format_to_ahb_format(VkFormat vk_format)
112 {
113 switch (vk_format) {
114 case VK_FORMAT_R8G8B8A8_UNORM:
115 return AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM;
116 case VK_FORMAT_R8G8B8_UNORM:
117 return AHARDWAREBUFFER_FORMAT_R8G8B8_UNORM;
118 case VK_FORMAT_R5G6B5_UNORM_PACK16:
119 return AHARDWAREBUFFER_FORMAT_R5G6B5_UNORM;
120 case VK_FORMAT_R16G16B16A16_SFLOAT:
121 return AHARDWAREBUFFER_FORMAT_R16G16B16A16_FLOAT;
122 case VK_FORMAT_A2B10G10R10_UNORM_PACK32:
123 return AHARDWAREBUFFER_FORMAT_R10G10B10A2_UNORM;
124 case VK_FORMAT_D16_UNORM:
125 return AHARDWAREBUFFER_FORMAT_D16_UNORM;
126 case VK_FORMAT_X8_D24_UNORM_PACK32:
127 return AHARDWAREBUFFER_FORMAT_D24_UNORM;
128 case VK_FORMAT_D24_UNORM_S8_UINT:
129 return AHARDWAREBUFFER_FORMAT_D24_UNORM_S8_UINT;
130 case VK_FORMAT_D32_SFLOAT:
131 return AHARDWAREBUFFER_FORMAT_D32_FLOAT;
132 case VK_FORMAT_D32_SFLOAT_S8_UINT:
133 return AHARDWAREBUFFER_FORMAT_D32_FLOAT_S8_UINT;
134 case VK_FORMAT_S8_UINT:
135 return AHARDWAREBUFFER_FORMAT_S8_UINT;
136 case VK_FORMAT_R8_UNORM:
137 return AHARDWAREBUFFER_FORMAT_R8_UNORM;
138 default:
139 return 0;
140 }
141 }
142
143 /* Construct ahw usage mask from image usage bits, see
144 * 'AHardwareBuffer Usage Equivalence' in Vulkan spec.
145 */
146 uint64_t
vk_image_usage_to_ahb_usage(const VkImageCreateFlags vk_create,const VkImageUsageFlags vk_usage)147 vk_image_usage_to_ahb_usage(const VkImageCreateFlags vk_create,
148 const VkImageUsageFlags vk_usage)
149 {
150 uint64_t ahb_usage = 0;
151 if (vk_usage & (VK_IMAGE_USAGE_SAMPLED_BIT |
152 VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT))
153 ahb_usage |= AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE;
154
155 if (vk_usage & (VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT |
156 VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT))
157 ahb_usage |= AHARDWAREBUFFER_USAGE_GPU_FRAMEBUFFER;
158
159 if (vk_usage & VK_IMAGE_USAGE_STORAGE_BIT)
160 ahb_usage |= AHARDWAREBUFFER_USAGE_GPU_DATA_BUFFER;
161
162 if (vk_create & VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT)
163 ahb_usage |= AHARDWAREBUFFER_USAGE_GPU_CUBE_MAP;
164
165 if (vk_create & VK_IMAGE_CREATE_PROTECTED_BIT)
166 ahb_usage |= AHARDWAREBUFFER_USAGE_PROTECTED_CONTENT;
167
168 /* No usage bits set - set at least one GPU usage. */
169 if (ahb_usage == 0)
170 ahb_usage = AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE;
171
172 return ahb_usage;
173 }
174
175 struct AHardwareBuffer *
vk_alloc_ahardware_buffer(const VkMemoryAllocateInfo * pAllocateInfo)176 vk_alloc_ahardware_buffer(const VkMemoryAllocateInfo *pAllocateInfo)
177 {
178 const VkMemoryDedicatedAllocateInfo *dedicated_info =
179 vk_find_struct_const(pAllocateInfo->pNext,
180 MEMORY_DEDICATED_ALLOCATE_INFO);
181
182 uint32_t w = 0;
183 uint32_t h = 1;
184 uint32_t layers = 1;
185 uint32_t format = 0;
186 uint64_t usage = 0;
187
188 /* If caller passed dedicated information. */
189 if (dedicated_info && dedicated_info->image) {
190 VK_FROM_HANDLE(vk_image, image, dedicated_info->image);
191
192 if (!image->ahb_format)
193 return NULL;
194
195 w = image->extent.width;
196 h = image->extent.height;
197 layers = image->array_layers;
198 format = image->ahb_format;
199 usage = vk_image_usage_to_ahb_usage(image->create_flags,
200 image->usage);
201 } else if (dedicated_info && dedicated_info->buffer) {
202 VK_FROM_HANDLE(vk_buffer, buffer, dedicated_info->buffer);
203 w = buffer->size;
204 format = AHARDWAREBUFFER_FORMAT_BLOB;
205 usage = AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN |
206 AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN;
207 } else {
208 w = pAllocateInfo->allocationSize;
209 format = AHARDWAREBUFFER_FORMAT_BLOB;
210 usage = AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN |
211 AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN;
212 }
213
214 struct AHardwareBuffer_Desc desc = {
215 .width = w,
216 .height = h,
217 .layers = layers,
218 .format = format,
219 .usage = usage,
220 };
221
222 struct AHardwareBuffer *ahb;
223 if (AHardwareBuffer_allocate(&desc, &ahb) != 0)
224 return NULL;
225
226 return ahb;
227 }
228 #endif /* ANDROID_API_LEVEL >= 26 */
229
/* Common implementation of vkAcquireImageANDROID: imports the incoming
 * sync-file fd into the provided semaphore and/or fence using the
 * temporary-import path.  Ownership of nativeFenceFd follows the Android
 * contract quoted below; the image parameter is not used here.
 */
VKAPI_ATTR VkResult VKAPI_CALL
vk_common_AcquireImageANDROID(VkDevice _device,
                              VkImage image,
                              int nativeFenceFd,
                              VkSemaphore semaphore,
                              VkFence fence)
{
   VK_FROM_HANDLE(vk_device, device, _device);
   VkResult result = VK_SUCCESS;

   /* From https://source.android.com/devices/graphics/implement-vulkan :
    *
    *    "The driver takes ownership of the fence file descriptor and closes
    *    the fence file descriptor when no longer needed. The driver must do
    *    so even if neither a semaphore or fence object is provided, or even
    *    if vkAcquireImageANDROID fails and returns an error."
    *
    * The Vulkan spec for VkImportFence/SemaphoreFdKHR(), however, requires
    * the file descriptor to be left alone on failure.
    *
    * To reconcile the two, we track the fd(s) locally: a non-negative
    * semaphore_fd / fence_fd below means "we still own this fd and must
    * close it before returning".
    */
   int semaphore_fd = -1, fence_fd = -1;
   if (nativeFenceFd >= 0) {
      if (semaphore != VK_NULL_HANDLE && fence != VK_NULL_HANDLE) {
         /* We have both so we have to import the sync file twice. One of
          * them needs to be a dup.
          */
         semaphore_fd = nativeFenceFd;
         fence_fd = dup(nativeFenceFd);
         if (fence_fd < 0) {
            /* dup() failed, but per the Android contract above we still own
             * nativeFenceFd and must close it even on error.
             */
            VkResult err = (errno == EMFILE) ? VK_ERROR_TOO_MANY_OBJECTS :
                                               VK_ERROR_OUT_OF_HOST_MEMORY;
            close(nativeFenceFd);
            return vk_error(device, err);
         }
      } else if (semaphore != VK_NULL_HANDLE) {
         semaphore_fd = nativeFenceFd;
      } else if (fence != VK_NULL_HANDLE) {
         fence_fd = nativeFenceFd;
      } else {
         /* Nothing to import into so we have to close the file */
         close(nativeFenceFd);
      }
   }

   if (semaphore != VK_NULL_HANDLE) {
      /* Temporary import: the payload is consumed by the next wait. */
      const VkImportSemaphoreFdInfoKHR info = {
         .sType = VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR,
         .semaphore = semaphore,
         .flags = VK_SEMAPHORE_IMPORT_TEMPORARY_BIT,
         .handleType = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT,
         .fd = semaphore_fd,
      };
      result = device->dispatch_table.ImportSemaphoreFdKHR(_device, &info);
      if (result == VK_SUCCESS)
         semaphore_fd = -1; /* The driver took ownership */
   }

   /* Skip the fence import if the semaphore import already failed, so we
    * report the first error; fence_fd is then closed below.
    */
   if (result == VK_SUCCESS && fence != VK_NULL_HANDLE) {
      const VkImportFenceFdInfoKHR info = {
         .sType = VK_STRUCTURE_TYPE_IMPORT_FENCE_FD_INFO_KHR,
         .fence = fence,
         .flags = VK_FENCE_IMPORT_TEMPORARY_BIT,
         .handleType = VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT,
         .fd = fence_fd,
      };
      result = device->dispatch_table.ImportFenceFdKHR(_device, &info);
      if (result == VK_SUCCESS)
         fence_fd = -1; /* The driver took ownership */
   }

   /* Close any fd the driver did not take ownership of; this satisfies the
    * Android "always close" rule on every failure path.
    */
   if (semaphore_fd >= 0)
      close(semaphore_fd);
   if (fence_fd >= 0)
      close(fence_fd);

   return result;
}
307
308 static VkResult
vk_anb_semaphore_init_once(struct vk_queue * queue,struct vk_device * device)309 vk_anb_semaphore_init_once(struct vk_queue *queue, struct vk_device *device)
310 {
311 if (queue->anb_semaphore != VK_NULL_HANDLE)
312 return VK_SUCCESS;
313
314 const VkExportSemaphoreCreateInfo export_info = {
315 .sType = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO,
316 .handleTypes = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT,
317 };
318 const VkSemaphoreCreateInfo create_info = {
319 .sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO,
320 .pNext = &export_info,
321 };
322 return device->dispatch_table.CreateSemaphore(vk_device_to_handle(device),
323 &create_info, NULL,
324 &queue->anb_semaphore);
325 }
326
327 VKAPI_ATTR VkResult VKAPI_CALL
vk_common_QueueSignalReleaseImageANDROID(VkQueue _queue,uint32_t waitSemaphoreCount,const VkSemaphore * pWaitSemaphores,VkImage image,int * pNativeFenceFd)328 vk_common_QueueSignalReleaseImageANDROID(VkQueue _queue,
329 uint32_t waitSemaphoreCount,
330 const VkSemaphore *pWaitSemaphores,
331 VkImage image,
332 int *pNativeFenceFd)
333 {
334 VK_FROM_HANDLE(vk_queue, queue, _queue);
335 struct vk_device *device = queue->base.device;
336 VkResult result = VK_SUCCESS;
337
338 STACK_ARRAY(VkPipelineStageFlags, stage_flags, MAX2(1, waitSemaphoreCount));
339 for (uint32_t i = 0; i < MAX2(1, waitSemaphoreCount); i++)
340 stage_flags[i] = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT;
341
342 result = vk_anb_semaphore_init_once(queue, device);
343 if (result != VK_SUCCESS)
344 return result;
345
346 const VkSubmitInfo submit_info = {
347 .sType = VK_STRUCTURE_TYPE_SUBMIT_INFO,
348 .waitSemaphoreCount = waitSemaphoreCount,
349 .pWaitSemaphores = pWaitSemaphores,
350 .pWaitDstStageMask = stage_flags,
351 .signalSemaphoreCount = 1,
352 .pSignalSemaphores = &queue->anb_semaphore,
353 };
354 result = device->dispatch_table.QueueSubmit(_queue, 1, &submit_info,
355 VK_NULL_HANDLE);
356 if (result != VK_SUCCESS)
357 return result;
358
359 const VkSemaphoreGetFdInfoKHR get_fd = {
360 .sType = VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR,
361 .handleType = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT,
362 .semaphore = queue->anb_semaphore,
363 };
364 return device->dispatch_table.GetSemaphoreFdKHR(vk_device_to_handle(device),
365 &get_fd, pNativeFenceFd);
366 }
367