/*
 * Copyright © 2017, Google Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */

#include <hardware/gralloc.h>
#include <hardware/hardware.h>
#include <hardware/hwvulkan.h>
#include <vulkan/vk_android_native_buffer.h>
#include <vulkan/vk_icd.h>
#include <sync/sync.h>

#include "anv_private.h"

static int anv_hal_open(const struct hw_module_t* mod, const char* id, struct hw_device_t** dev);
static int anv_hal_close(struct hw_device_t *dev);

static void UNUSED
static_asserts(void)
{
   STATIC_ASSERT(HWVULKAN_DISPATCH_MAGIC == ICD_LOADER_MAGIC);
}

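/* The Android hardware loader (hw_get_module) finds this module by looking
 * up the HAL_MODULE_INFO_SYM ("HMI") symbol in the shared object and then
 * calls its open() method to obtain the hwvulkan device below.
 */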
PUBLIC struct hwvulkan_module_t HAL_MODULE_INFO_SYM = {
   .common = {
      .tag = HARDWARE_MODULE_TAG,
      .module_api_version = HWVULKAN_MODULE_API_VERSION_0_1,
      .hal_api_version = HARDWARE_MAKE_API_VERSION(1, 0),
      .id = HWVULKAN_HARDWARE_MODULE_ID,
      .name = "Intel Vulkan HAL",
      .author = "Intel",
      .methods = &(hw_module_methods_t) {
         .open = anv_hal_open,
      },
   },
};

/* If any bits in test_mask are set, then unset them and return true. */
static inline bool
unmask32(uint32_t *inout_mask, uint32_t test_mask)
{
   uint32_t orig_mask = *inout_mask;
   *inout_mask &= ~test_mask;
   return *inout_mask != orig_mask;
}

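/* hw_module_methods_t::open() entrypoint. The Android Vulkan loader opens
 * HWVULKAN_DEVICE_0 to obtain a hwvulkan_device_t whose function pointers it
 * uses to bootstrap instance creation.
 */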
static int
anv_hal_open(const struct hw_module_t* mod, const char* id,
             struct hw_device_t** dev)
{
   assert(mod == &HAL_MODULE_INFO_SYM.common);
   assert(strcmp(id, HWVULKAN_DEVICE_0) == 0);

   hwvulkan_device_t *hal_dev = malloc(sizeof(*hal_dev));
   if (!hal_dev)
      return -1;

   *hal_dev = (hwvulkan_device_t) {
      .common = {
         .tag = HARDWARE_DEVICE_TAG,
         .version = HWVULKAN_DEVICE_API_VERSION_0_1,
         .module = &HAL_MODULE_INFO_SYM.common,
         .close = anv_hal_close,
      },
      .EnumerateInstanceExtensionProperties = anv_EnumerateInstanceExtensionProperties,
      .CreateInstance = anv_CreateInstance,
      .GetInstanceProcAddr = anv_GetInstanceProcAddr,
   };

   *dev = &hal_dev->common;
   return 0;
}

static int
anv_hal_close(struct hw_device_t *dev)
{
   /* hwvulkan.h claims that hw_device_t::close() is never called. */
   return -1;
}

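/* Create a VkImage backed by a gralloc buffer (VK_ANDROID_native_buffer):
 * import the handle's dma-buf into a BO, derive the isl tiling from the
 * kernel's I915_GEM_GET_TILING, create the anv_image, and bind the BO to the
 * image's single plane.
 */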
VkResult
anv_image_from_gralloc(VkDevice device_h,
                       const VkImageCreateInfo *base_info,
                       const VkNativeBufferANDROID *gralloc_info,
                       const VkAllocationCallbacks *alloc,
                       VkImage *out_image_h)
{
   ANV_FROM_HANDLE(anv_device, device, device_h);
   VkImage image_h = VK_NULL_HANDLE;
   struct anv_image *image = NULL;
   struct anv_bo *bo = NULL;
   VkResult result;

   struct anv_image_create_info anv_info = {
      .vk_info = base_info,
      .isl_extra_usage_flags = ISL_SURF_USAGE_DISABLE_AUX_BIT,
   };

   if (gralloc_info->handle->numFds != 1) {
      return vk_errorf(device->instance, device,
                       VK_ERROR_INVALID_EXTERNAL_HANDLE_KHR,
                       "VkNativeBufferANDROID::handle::numFds is %d, "
                       "expected 1", gralloc_info->handle->numFds);
   }

   /* Do not close the gralloc handle's dma_buf. The lifetime of the dma_buf
    * must exceed that of the gralloc handle, and we do not own the gralloc
    * handle.
    */
   int dma_buf = gralloc_info->handle->data[0];

   result = anv_bo_cache_import(device, &device->bo_cache, dma_buf, &bo);
   if (result != VK_SUCCESS) {
      return vk_errorf(device->instance, device, result,
                       "failed to import dma-buf from VkNativeBufferANDROID");
   }

   int i915_tiling = anv_gem_get_tiling(device, bo->gem_handle);
   switch (i915_tiling) {
   case I915_TILING_NONE:
      anv_info.isl_tiling_flags = ISL_TILING_LINEAR_BIT;
      break;
   case I915_TILING_X:
      anv_info.isl_tiling_flags = ISL_TILING_X_BIT;
      break;
   case I915_TILING_Y:
      anv_info.isl_tiling_flags = ISL_TILING_Y0_BIT;
      break;
   case -1:
      result = vk_errorf(device->instance, device,
                         VK_ERROR_INVALID_EXTERNAL_HANDLE_KHR,
                         "DRM_IOCTL_I915_GEM_GET_TILING failed for "
                         "VkNativeBufferANDROID");
      goto fail_tiling;
   default:
      result = vk_errorf(device->instance, device,
                         VK_ERROR_INVALID_EXTERNAL_HANDLE_KHR,
                         "DRM_IOCTL_I915_GEM_GET_TILING returned unknown "
                         "tiling %d for VkNativeBufferANDROID", i915_tiling);
      goto fail_tiling;
   }

   enum isl_format format = anv_get_isl_format(&device->info,
                                               base_info->format,
                                               VK_IMAGE_ASPECT_COLOR_BIT,
                                               base_info->tiling);
   assert(format != ISL_FORMAT_UNSUPPORTED);

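   /* VkNativeBufferANDROID::stride is a row pitch in pixels, while
    * anv_image_create_info expects a row pitch in bytes, so scale by the
    * format's bytes per block (bpb / 8).
    */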
   anv_info.stride = gralloc_info->stride *
                     (isl_format_get_layout(format)->bpb / 8);

   result = anv_image_create(device_h, &anv_info, alloc, &image_h);
   image = anv_image_from_handle(image_h);
   if (result != VK_SUCCESS)
      goto fail_create;

   if (bo->size < image->size) {
      result = vk_errorf(device->instance, device,
                         VK_ERROR_INVALID_EXTERNAL_HANDLE_KHR,
                         "dma-buf from VkNativeBufferANDROID is too small for "
                         "VkImage: %"PRIu64"B < %"PRIu64"B",
                         bo->size, image->size);
      goto fail_size;
   }

   assert(image->n_planes == 1);
   assert(image->planes[0].bo_offset == 0);

   image->planes[0].bo = bo;
   image->planes[0].bo_is_owned = true;

   /* We need to set the WRITE flag on window system buffers so that GEM will
    * know we're writing to them and synchronize uses on other rings (for
    * example, if the display server uses the blitter ring).
    *
    * If this function fails and the imported bo was already resident in the
    * bo cache, we should avoid updating the bo's flags. Therefore, we defer
    * updating the flags until success is certain.
    */
   bo->flags &= ~EXEC_OBJECT_ASYNC;
   bo->flags |= EXEC_OBJECT_WRITE;

   /* Don't clobber the out-parameter until success is certain. */
   *out_image_h = image_h;

   return VK_SUCCESS;

 fail_size:
   anv_DestroyImage(device_h, image_h, alloc);
 fail_create:
 fail_tiling:
   anv_bo_cache_release(device, &device->bo_cache, bo);

   return result;
}

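/* Called by the Android platform swapchain (VK_ANDROID_native_buffer) when
 * it allocates buffers for a swapchain: translate the requested
 * VkImageUsageFlags into gralloc usage bits the allocator understands.
 */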
VkResult anv_GetSwapchainGrallocUsageANDROID(
    VkDevice device_h,
    VkFormat format,
    VkImageUsageFlags imageUsage,
    int* grallocUsage)
{
   ANV_FROM_HANDLE(anv_device, device, device_h);
   struct anv_physical_device *phys_dev = &device->instance->physicalDevice;
   VkPhysicalDevice phys_dev_h = anv_physical_device_to_handle(phys_dev);
   VkResult result;

   *grallocUsage = 0;
   intel_logd("%s: format=%d, usage=0x%x", __func__, format, imageUsage);

   /* WARNING: Android Nougat's libvulkan.so hardcodes the VkImageUsageFlags
    * returned to applications via VkSurfaceCapabilitiesKHR::supportedUsageFlags.
    * The relevant code in libvulkan/swapchain.cpp contains this fun comment:
    *
    *     TODO(jessehall): I think these are right, but haven't thought hard
    *     about it. Do we need to query the driver for support of any of
    *     these?
    *
    * Any disagreement between this function and the hardcoded
    * VkSurfaceCapabilitiesKHR::supportedUsageFlags causes tests
    * dEQP-VK.wsi.android.swapchain.*.image_usage to fail.
    */

   const VkPhysicalDeviceImageFormatInfo2KHR image_format_info = {
      .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2_KHR,
      .format = format,
      .type = VK_IMAGE_TYPE_2D,
      .tiling = VK_IMAGE_TILING_OPTIMAL,
      .usage = imageUsage,
   };

   VkImageFormatProperties2KHR image_format_props = {
      .sType = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2_KHR,
   };

   /* Check that requested format and usage are supported. */
   result = anv_GetPhysicalDeviceImageFormatProperties2KHR(phys_dev_h,
               &image_format_info, &image_format_props);
   if (result != VK_SUCCESS) {
      return vk_errorf(device->instance, device, result,
                       "anv_GetPhysicalDeviceImageFormatProperties2KHR failed "
                       "inside %s", __func__);
   }

   /* Reject STORAGE here to avoid complexity elsewhere. */
   if (imageUsage & VK_IMAGE_USAGE_STORAGE_BIT) {
      return vk_errorf(device->instance, device, VK_ERROR_FORMAT_NOT_SUPPORTED,
                       "VK_IMAGE_USAGE_STORAGE_BIT unsupported for gralloc "
                       "swapchain");
   }

   if (unmask32(&imageUsage, VK_IMAGE_USAGE_TRANSFER_DST_BIT |
                             VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT))
      *grallocUsage |= GRALLOC_USAGE_HW_RENDER;

   if (unmask32(&imageUsage, VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
                             VK_IMAGE_USAGE_SAMPLED_BIT |
                             VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT))
      *grallocUsage |= GRALLOC_USAGE_HW_TEXTURE;

   /* All VkImageUsageFlags not explicitly checked here are unsupported for
    * gralloc swapchains.
    */
   if (imageUsage != 0) {
      return vk_errorf(device->instance, device, VK_ERROR_FORMAT_NOT_SUPPORTED,
                       "unsupported VkImageUsageFlags(0x%x) for gralloc "
                       "swapchain", imageUsage);
   }

   /* The formats below support GRALLOC_USAGE_HW_FB (that is, display
    * scanout). This short list is universally supported on Intel hardware
    * but is incomplete; the full set of supported formats depends on the
    * kernel and the hardware.
    *
    * FINISHME: Advertise all display-supported formats.
    */
   if (format == VK_FORMAT_B8G8R8A8_UNORM ||
       format == VK_FORMAT_B5G6R5_UNORM_PACK16) {
      *grallocUsage |= GRALLOC_USAGE_HW_FB |
                       GRALLOC_USAGE_HW_COMPOSER |
                       GRALLOC_USAGE_EXTERNAL_DISP;
   }

   if (*grallocUsage == 0)
      return VK_ERROR_FORMAT_NOT_SUPPORTED;

   return VK_SUCCESS;
}

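/* Called by the platform's vkAcquireNextImageKHR implementation
 * (VK_ANDROID_native_buffer) when it hands a swapchain image back to the
 * application. nativeFenceFd, if not -1, signals when the buffer's previous
 * reader is done with it.
 */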
VkResult
anv_AcquireImageANDROID(
    VkDevice device_h,
    VkImage image_h,
    int nativeFenceFd,
    VkSemaphore semaphore_h,
    VkFence fence_h)
{
   ANV_FROM_HANDLE(anv_device, device, device_h);
   VkResult result = VK_SUCCESS;

   if (nativeFenceFd != -1) {
      /* As a simple, first-pass implementation of VK_ANDROID_native_buffer,
       * we block on the nativeFenceFd. This may introduce latency and is
       * definitely inefficient, yet it's correct.
       *
       * FINISHME(chadv): Import the nativeFenceFd into the VkSemaphore and
       * VkFence.
       */
      if (sync_wait(nativeFenceFd, /*timeout*/ -1) < 0) {
         result = vk_errorf(device->instance, device, VK_ERROR_DEVICE_LOST,
                            "%s: failed to wait on nativeFenceFd=%d",
                            __func__, nativeFenceFd);
      }

      /* From VK_ANDROID_native_buffer's pseudo spec
       * (https://source.android.com/devices/graphics/implement-vulkan):
       *
       *    The driver takes ownership of the fence fd and is responsible for
       *    closing it [...] even if vkAcquireImageANDROID fails and returns
       *    an error.
       */
      close(nativeFenceFd);

      if (result != VK_SUCCESS)
         return result;
   }

   if (semaphore_h || fence_h) {
      /* Thanks to implicit sync, the image is ready for GPU access. But we
       * must still put the semaphore into the "submit" state; otherwise the
       * client may get unexpected behavior if the client later uses it as
       * a wait semaphore.
       *
       * Because we blocked above on the nativeFenceFd, the image is also
       * ready for foreign-device access (including CPU access). But we must
       * still signal the fence; otherwise the client may get unexpected
       * behavior if the client later waits on it.
       *
       * For some values of anv_semaphore_type, we must submit the semaphore
       * to execbuf in order to signal it. Likewise for anv_fence_type.
       * Instead of open-coding here the signal operation for each
       * anv_semaphore_type and anv_fence_type, we piggy-back on
       * vkQueueSubmit.
       */
      const VkSubmitInfo submit = {
         .sType = VK_STRUCTURE_TYPE_SUBMIT_INFO,
         .waitSemaphoreCount = 0,
         .commandBufferCount = 0,
         .signalSemaphoreCount = (semaphore_h ? 1 : 0),
         .pSignalSemaphores = &semaphore_h,
      };

      result = anv_QueueSubmit(anv_queue_to_handle(&device->queue), 1,
                               &submit, fence_h);
      if (result != VK_SUCCESS) {
         return vk_errorf(device->instance, device, result,
                          "anv_QueueSubmit failed inside %s", __func__);
      }
   }

   return VK_SUCCESS;
}

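/* Called at vkQueuePresentKHR time by the platform swapchain. We flush the
 * present wait semaphores through the queue and return -1 as the native
 * fence so the consumer relies on implicit sync.
 */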
VkResult
anv_QueueSignalReleaseImageANDROID(
    VkQueue queue,
    uint32_t waitSemaphoreCount,
    const VkSemaphore* pWaitSemaphores,
    VkImage image,
    int* pNativeFenceFd)
{
   VkResult result;

   if (waitSemaphoreCount == 0)
      goto done;

   result = anv_QueueSubmit(queue, 1,
      &(VkSubmitInfo) {
            .sType = VK_STRUCTURE_TYPE_SUBMIT_INFO,
            .waitSemaphoreCount = waitSemaphoreCount,
            .pWaitSemaphores = pWaitSemaphores,
      },
      (VkFence) VK_NULL_HANDLE);
   if (result != VK_SUCCESS)
      return result;

 done:
   if (pNativeFenceFd) {
      /* We can rely on implicit sync because we submitted all the wait
       * semaphores to the queue above.
       */
      *pNativeFenceFd = -1;
   }

   return VK_SUCCESS;
}