/*
 * Copyright © 2015 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */

#include <wayland-client.h>

#include <assert.h>
#include <stdlib.h>
#include <stdio.h>
#include <unistd.h>
#include <errno.h>
#include <string.h>
#include <pthread.h>

#include "vk_util.h"
#include "wsi_common_private.h"
#include "wsi_common_wayland.h"
#include "wayland-drm-client-protocol.h"

#include <util/hash_table.h>
#include <util/u_vector.h>

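/* Copy 'count' elements from 'src' to 'dest', with a compile-time check that
 * the source and destination element types have the same size.
 */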
#define typed_memcpy(dest, src, count) ({ \
   STATIC_ASSERT(sizeof(*src) == sizeof(*dest)); \
   memcpy((dest), (src), (count) * sizeof(*(src))); \
})

struct wsi_wayland;

struct wsi_wl_display {
   /* The real wl_display */
   struct wl_display *wl_display;
   /* Actually a proxy wrapper around the event queue */
   struct wl_display *wl_display_wrapper;
   struct wl_event_queue *queue;
   struct wl_drm *drm;

   struct wsi_wayland *wsi_wl;
   /* Vector of VkFormats supported */
   struct u_vector formats;

   uint32_t capabilities;

   /* Only used for displays created by wsi_wl_display_create */
   uint32_t refcount;
};

struct wsi_wayland {
   struct wsi_interface base;

   struct wsi_device *wsi;

   const VkAllocationCallbacks *alloc;
   VkPhysicalDevice physical_device;
};

static void
wsi_wl_display_add_vk_format(struct wsi_wl_display *display, VkFormat format)
{
   /* Don't add a format that's already in the list */
   VkFormat *f;
   u_vector_foreach(f, &display->formats)
      if (*f == format)
         return;

   /* Don't add formats that aren't renderable. */
   VkFormatProperties props;

   display->wsi_wl->wsi->GetPhysicalDeviceFormatProperties(display->wsi_wl->physical_device,
                                                           format, &props);
   if (!(props.optimalTilingFeatures & VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT))
      return;

   f = u_vector_add(&display->formats);
   if (f)
      *f = format;
}

static void
drm_handle_device(void *data, struct wl_drm *drm, const char *name)
{
}

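/* Map a Vulkan format to the corresponding wl_drm (DRM fourcc) format.
 * DRM fourccs name the packed little-endian 32-bit value, so
 * VK_FORMAT_B8G8R8A8_* (bytes B, G, R, A in memory) corresponds to
 * WL_DRM_FORMAT_ARGB8888, or XRGB8888 when the compositor should ignore
 * the alpha channel.
 */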
static uint32_t
wl_drm_format_for_vk_format(VkFormat vk_format, bool alpha)
{
   switch (vk_format) {
   /* TODO: Figure out what all the formats mean and make this table
    * correct.
    */
#if 0
   case VK_FORMAT_R4G4B4A4_UNORM:
      return alpha ? WL_DRM_FORMAT_ABGR4444 : WL_DRM_FORMAT_XBGR4444;
   case VK_FORMAT_R5G6B5_UNORM:
      return WL_DRM_FORMAT_BGR565;
   case VK_FORMAT_R5G5B5A1_UNORM:
      return alpha ? WL_DRM_FORMAT_ABGR1555 : WL_DRM_FORMAT_XBGR1555;
   case VK_FORMAT_R8G8B8_UNORM:
      return WL_DRM_FORMAT_XBGR8888;
   case VK_FORMAT_R8G8B8A8_UNORM:
      return alpha ? WL_DRM_FORMAT_ABGR8888 : WL_DRM_FORMAT_XBGR8888;
   case VK_FORMAT_R10G10B10A2_UNORM:
      return alpha ? WL_DRM_FORMAT_ABGR2101010 : WL_DRM_FORMAT_XBGR2101010;
   case VK_FORMAT_B4G4R4A4_UNORM:
      return alpha ? WL_DRM_FORMAT_ARGB4444 : WL_DRM_FORMAT_XRGB4444;
   case VK_FORMAT_B5G6R5_UNORM:
      return WL_DRM_FORMAT_RGB565;
   case VK_FORMAT_B5G5R5A1_UNORM:
      return alpha ? WL_DRM_FORMAT_ARGB1555 : WL_DRM_FORMAT_XRGB1555;
#endif
   case VK_FORMAT_B8G8R8_UNORM:
   case VK_FORMAT_B8G8R8_SRGB:
      return WL_DRM_FORMAT_BGRX8888;
   case VK_FORMAT_B8G8R8A8_UNORM:
   case VK_FORMAT_B8G8R8A8_SRGB:
      return alpha ? WL_DRM_FORMAT_ARGB8888 : WL_DRM_FORMAT_XRGB8888;
#if 0
   case VK_FORMAT_B10G10R10A2_UNORM:
      return alpha ? WL_DRM_FORMAT_ARGB2101010 : WL_DRM_FORMAT_XRGB2101010;
#endif

   default:
      assert(!"Unsupported Vulkan format");
      return 0;
   }
}

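/* wl_drm format events advertise the DRM formats the compositor accepts.
 * If the formats vector was never initialized (element_size == 0), the
 * caller didn't ask for the format list and the events are ignored.
 */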
static void
drm_handle_format(void *data, struct wl_drm *drm, uint32_t wl_format)
{
   struct wsi_wl_display *display = data;
   if (display->formats.element_size == 0)
      return;

   switch (wl_format) {
#if 0
   case WL_DRM_FORMAT_ABGR4444:
   case WL_DRM_FORMAT_XBGR4444:
      wsi_wl_display_add_vk_format(display, VK_FORMAT_R4G4B4A4_UNORM);
      break;
   case WL_DRM_FORMAT_BGR565:
      wsi_wl_display_add_vk_format(display, VK_FORMAT_R5G6B5_UNORM);
      break;
   case WL_DRM_FORMAT_ABGR1555:
   case WL_DRM_FORMAT_XBGR1555:
      wsi_wl_display_add_vk_format(display, VK_FORMAT_R5G5B5A1_UNORM);
      break;
   case WL_DRM_FORMAT_XBGR8888:
      wsi_wl_display_add_vk_format(display, VK_FORMAT_R8G8B8_UNORM);
      /* fallthrough */
   case WL_DRM_FORMAT_ABGR8888:
      wsi_wl_display_add_vk_format(display, VK_FORMAT_R8G8B8A8_UNORM);
      break;
   case WL_DRM_FORMAT_ABGR2101010:
   case WL_DRM_FORMAT_XBGR2101010:
      wsi_wl_display_add_vk_format(display, VK_FORMAT_R10G10B10A2_UNORM);
      break;
   case WL_DRM_FORMAT_ARGB4444:
   case WL_DRM_FORMAT_XRGB4444:
      wsi_wl_display_add_vk_format(display, VK_FORMAT_B4G4R4A4_UNORM);
      break;
   case WL_DRM_FORMAT_RGB565:
      wsi_wl_display_add_vk_format(display, VK_FORMAT_B5G6R5_UNORM);
      break;
   case WL_DRM_FORMAT_ARGB1555:
   case WL_DRM_FORMAT_XRGB1555:
      wsi_wl_display_add_vk_format(display, VK_FORMAT_B5G5R5A1_UNORM);
      break;
#endif
   case WL_DRM_FORMAT_XRGB8888:
      wsi_wl_display_add_vk_format(display, VK_FORMAT_B8G8R8_SRGB);
      wsi_wl_display_add_vk_format(display, VK_FORMAT_B8G8R8_UNORM);
      /* fallthrough */
   case WL_DRM_FORMAT_ARGB8888:
      wsi_wl_display_add_vk_format(display, VK_FORMAT_B8G8R8A8_SRGB);
      wsi_wl_display_add_vk_format(display, VK_FORMAT_B8G8R8A8_UNORM);
      break;
#if 0
   case WL_DRM_FORMAT_ARGB2101010:
   case WL_DRM_FORMAT_XRGB2101010:
      wsi_wl_display_add_vk_format(display, VK_FORMAT_B10G10R10A2_UNORM);
      break;
#endif
   }
}

static void
drm_handle_authenticated(void *data, struct wl_drm *drm)
{
}

static void
drm_handle_capabilities(void *data, struct wl_drm *drm, uint32_t capabilities)
{
   struct wsi_wl_display *display = data;

   display->capabilities = capabilities;
}

static const struct wl_drm_listener drm_listener = {
   drm_handle_device,
   drm_handle_format,
   drm_handle_authenticated,
   drm_handle_capabilities,
};

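/* Bind wl_drm at interface version 2: the capabilities event and the
 * create_prime_buffer request that this code relies on require version 2.
 */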
static void
registry_handle_global(void *data, struct wl_registry *registry,
                       uint32_t name, const char *interface, uint32_t version)
{
   struct wsi_wl_display *display = data;

   if (strcmp(interface, "wl_drm") == 0) {
      assert(display->drm == NULL);

      assert(version >= 2);
      display->drm = wl_registry_bind(registry, name, &wl_drm_interface, 2);

      if (display->drm)
         wl_drm_add_listener(display->drm, &drm_listener, display);
   }
}

static void
registry_handle_global_remove(void *data, struct wl_registry *registry,
                              uint32_t name)
{ /* No-op */ }

static const struct wl_registry_listener registry_listener = {
   registry_handle_global,
   registry_handle_global_remove
};

static void
wsi_wl_display_finish(struct wsi_wl_display *display)
{
   assert(display->refcount == 0);

   u_vector_finish(&display->formats);
   if (display->drm)
      wl_drm_destroy(display->drm);
   if (display->wl_display_wrapper)
      wl_proxy_wrapper_destroy(display->wl_display_wrapper);
   if (display->queue)
      wl_event_queue_destroy(display->queue);
}

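/* Initialize a wsi_wl_display: create a private event queue, wrap the
 * application's wl_display in a proxy wrapper bound to that queue, and bind
 * wl_drm from the registry.  Two round-trips are needed: the first delivers
 * the registry globals, the second the wl_drm format and capability events.
 */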
static VkResult
wsi_wl_display_init(struct wsi_wayland *wsi_wl,
                    struct wsi_wl_display *display,
                    struct wl_display *wl_display,
                    bool get_format_list)
{
   VkResult result = VK_SUCCESS;
   memset(display, 0, sizeof(*display));

   display->wsi_wl = wsi_wl;
   display->wl_display = wl_display;

   if (get_format_list) {
      if (!u_vector_init(&display->formats, sizeof(VkFormat), 8)) {
         result = VK_ERROR_OUT_OF_HOST_MEMORY;
         goto fail;
      }
   }

   display->queue = wl_display_create_queue(wl_display);
   if (!display->queue) {
      result = VK_ERROR_OUT_OF_HOST_MEMORY;
      goto fail;
   }

   display->wl_display_wrapper = wl_proxy_create_wrapper(wl_display);
   if (!display->wl_display_wrapper) {
      result = VK_ERROR_OUT_OF_HOST_MEMORY;
      goto fail;
   }

   wl_proxy_set_queue((struct wl_proxy *) display->wl_display_wrapper,
                      display->queue);

   struct wl_registry *registry =
      wl_display_get_registry(display->wl_display_wrapper);
   if (!registry) {
      result = VK_ERROR_OUT_OF_HOST_MEMORY;
      goto fail;
   }

   wl_registry_add_listener(registry, &registry_listener, display);

   /* Round-trip to get the wl_drm global */
   wl_display_roundtrip_queue(display->wl_display, display->queue);

   if (!display->drm) {
      result = VK_ERROR_SURFACE_LOST_KHR;
      goto fail_registry;
   }

   /* Round-trip to get wl_drm formats and capabilities */
   wl_display_roundtrip_queue(display->wl_display, display->queue);

   /* We need prime support */
   if (!(display->capabilities & WL_DRM_CAPABILITY_PRIME)) {
      result = VK_ERROR_SURFACE_LOST_KHR;
      goto fail_registry;
   }

   /* We don't need this anymore */
   wl_registry_destroy(registry);

   display->refcount = 0;

   return VK_SUCCESS;

fail_registry:
   if (registry)
      wl_registry_destroy(registry);

fail:
   wsi_wl_display_finish(display);
   return result;
}

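/* Heap-allocate and initialize a reference-counted display.  Callers that
 * only need a short-lived display (e.g. for capability or format queries)
 * instead use wsi_wl_display_init on a stack variable.
 */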
static VkResult
wsi_wl_display_create(struct wsi_wayland *wsi, struct wl_display *wl_display,
                      struct wsi_wl_display **display_out)
{
   struct wsi_wl_display *display =
      vk_alloc(wsi->alloc, sizeof(*display), 8,
               VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
   if (!display)
      return VK_ERROR_OUT_OF_HOST_MEMORY;

   VkResult result = wsi_wl_display_init(wsi, display, wl_display, true);
   if (result != VK_SUCCESS) {
      vk_free(wsi->alloc, display);
      return result;
   }

   display->refcount++;
   *display_out = display;

   return result;
}

static struct wsi_wl_display *
wsi_wl_display_ref(struct wsi_wl_display *display)
{
   display->refcount++;
   return display;
}

static void
wsi_wl_display_unref(struct wsi_wl_display *display)
{
   if (display->refcount-- > 1)
      return;

   struct wsi_wayland *wsi = display->wsi_wl;
   wsi_wl_display_finish(display);
   vk_free(wsi->alloc, display);
}

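/* Presentation support is probed by initializing a throwaway display: if
 * wl_drm with PRIME support can be bound on this wl_display, presentation
 * is possible.
 */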
VkBool32
wsi_wl_get_presentation_support(struct wsi_device *wsi_device,
                                struct wl_display *wl_display)
{
   struct wsi_wayland *wsi =
      (struct wsi_wayland *)wsi_device->wsi[VK_ICD_WSI_PLATFORM_WAYLAND];

   struct wsi_wl_display display;
   VkResult ret = wsi_wl_display_init(wsi, &display, wl_display, false);
   /* On failure, wsi_wl_display_init has already cleaned up after itself. */
   if (ret == VK_SUCCESS)
      wsi_wl_display_finish(&display);

   return ret == VK_SUCCESS;
}

static VkResult
wsi_wl_surface_get_support(VkIcdSurfaceBase *surface,
                           struct wsi_device *wsi_device,
                           const VkAllocationCallbacks *alloc,
                           uint32_t queueFamilyIndex,
                           int local_fd,
                           VkBool32 *pSupported)
{
   *pSupported = true;

   return VK_SUCCESS;
}

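/* Only MAILBOX and FIFO are advertised; a Wayland compositor never presents
 * a client buffer with tearing, so IMMEDIATE mode cannot be honored here.
 */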
static const VkPresentModeKHR present_modes[] = {
   VK_PRESENT_MODE_MAILBOX_KHR,
   VK_PRESENT_MODE_FIFO_KHR,
};

static VkResult
wsi_wl_surface_get_capabilities(VkIcdSurfaceBase *surface,
                                VkSurfaceCapabilitiesKHR *caps)
{
   /* For true mailbox mode, we need at least 4 images:
    * 1) One to scan out from
    * 2) One to have queued for scan-out
    * 3) One to be currently held by the Wayland compositor
    * 4) One to render to
    */
   caps->minImageCount = 4;
   /* There is no real maximum */
   caps->maxImageCount = 0;

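   /* A currentExtent of (0xFFFFFFFF, 0xFFFFFFFF) means the surface size will
    * be determined by the extent of the swapchain targeting it.
    */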
   caps->currentExtent = (VkExtent2D) { -1, -1 };
   caps->minImageExtent = (VkExtent2D) { 1, 1 };
   /* This is the maximum supported size on Intel */
   caps->maxImageExtent = (VkExtent2D) { 1 << 14, 1 << 14 };
   caps->supportedTransforms = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR;
   caps->currentTransform = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR;
   caps->maxImageArrayLayers = 1;

   caps->supportedCompositeAlpha =
      VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR |
      VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR;

   caps->supportedUsageFlags =
      VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
      VK_IMAGE_USAGE_SAMPLED_BIT |
      VK_IMAGE_USAGE_TRANSFER_DST_BIT |
      VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;

   return VK_SUCCESS;
}

static VkResult
wsi_wl_surface_get_capabilities2(VkIcdSurfaceBase *surface,
                                 const void *info_next,
                                 VkSurfaceCapabilities2KHR *caps)
{
   assert(caps->sType == VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR);

   return wsi_wl_surface_get_capabilities(surface, &caps->surfaceCapabilities);
}

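/* Surface formats are queried by spinning up a temporary wsi_wl_display and
 * reporting back whatever wl_drm advertised during its initialization.
 */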
static VkResult
wsi_wl_surface_get_formats(VkIcdSurfaceBase *icd_surface,
                           struct wsi_device *wsi_device,
                           uint32_t *pSurfaceFormatCount,
                           VkSurfaceFormatKHR *pSurfaceFormats)
{
   VkIcdSurfaceWayland *surface = (VkIcdSurfaceWayland *)icd_surface;
   struct wsi_wayland *wsi =
      (struct wsi_wayland *)wsi_device->wsi[VK_ICD_WSI_PLATFORM_WAYLAND];

   struct wsi_wl_display display;
   if (wsi_wl_display_init(wsi, &display, surface->display, true))
      return VK_ERROR_SURFACE_LOST_KHR;

   VK_OUTARRAY_MAKE(out, pSurfaceFormats, pSurfaceFormatCount);

   VkFormat *disp_fmt;
   u_vector_foreach(disp_fmt, &display.formats) {
      vk_outarray_append(&out, out_fmt) {
         out_fmt->format = *disp_fmt;
         out_fmt->colorSpace = VK_COLORSPACE_SRGB_NONLINEAR_KHR;
      }
   }

   wsi_wl_display_finish(&display);

   return vk_outarray_status(&out);
}

static VkResult
wsi_wl_surface_get_formats2(VkIcdSurfaceBase *icd_surface,
                            struct wsi_device *wsi_device,
                            const void *info_next,
                            uint32_t *pSurfaceFormatCount,
                            VkSurfaceFormat2KHR *pSurfaceFormats)
{
   VkIcdSurfaceWayland *surface = (VkIcdSurfaceWayland *)icd_surface;
   struct wsi_wayland *wsi =
      (struct wsi_wayland *)wsi_device->wsi[VK_ICD_WSI_PLATFORM_WAYLAND];

   struct wsi_wl_display display;
   if (wsi_wl_display_init(wsi, &display, surface->display, true))
      return VK_ERROR_SURFACE_LOST_KHR;

   VK_OUTARRAY_MAKE(out, pSurfaceFormats, pSurfaceFormatCount);

   VkFormat *disp_fmt;
   u_vector_foreach(disp_fmt, &display.formats) {
      vk_outarray_append(&out, out_fmt) {
         out_fmt->surfaceFormat.format = *disp_fmt;
         out_fmt->surfaceFormat.colorSpace = VK_COLORSPACE_SRGB_NONLINEAR_KHR;
      }
   }

   wsi_wl_display_finish(&display);

   return vk_outarray_status(&out);
}

static VkResult
wsi_wl_surface_get_present_modes(VkIcdSurfaceBase *surface,
                                 uint32_t *pPresentModeCount,
                                 VkPresentModeKHR *pPresentModes)
{
   if (pPresentModes == NULL) {
      *pPresentModeCount = ARRAY_SIZE(present_modes);
      return VK_SUCCESS;
   }

   *pPresentModeCount = MIN2(*pPresentModeCount, ARRAY_SIZE(present_modes));
   typed_memcpy(pPresentModes, present_modes, *pPresentModeCount);

   if (*pPresentModeCount < ARRAY_SIZE(present_modes))
      return VK_INCOMPLETE;
   else
      return VK_SUCCESS;
}

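/* Back-end for vkCreateWaylandSurfaceKHR: the ICD surface just records the
 * wl_display and wl_surface handles; no Wayland requests are issued until a
 * swapchain is created for the surface.
 */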
VkResult wsi_create_wl_surface(const VkAllocationCallbacks *pAllocator,
                               const VkWaylandSurfaceCreateInfoKHR *pCreateInfo,
                               VkSurfaceKHR *pSurface)
{
   VkIcdSurfaceWayland *surface;

   surface = vk_alloc(pAllocator, sizeof *surface, 8,
                      VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (surface == NULL)
      return VK_ERROR_OUT_OF_HOST_MEMORY;

   surface->base.platform = VK_ICD_WSI_PLATFORM_WAYLAND;
   surface->display = pCreateInfo->display;
   surface->surface = pCreateInfo->surface;

   *pSurface = VkIcdSurfaceBase_to_handle(&surface->base);

   return VK_SUCCESS;
}

struct wsi_wl_image {
   struct wsi_image base;
   struct wl_buffer *buffer;
   bool busy;
};

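/* Per-swapchain state.  The wl_surface and wl_drm proxies are wrappers bound
 * to the display's private event queue, so swapchain events are never
 * dispatched on the application's default queue.
 */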
struct wsi_wl_swapchain {
   struct wsi_swapchain base;

   struct wsi_wl_display *display;

   struct wl_surface *surface;
   uint32_t surface_version;
   struct wl_drm *drm_wrapper;
   struct wl_callback *frame;

   VkExtent2D extent;
   VkFormat vk_format;
   uint32_t drm_format;

   VkPresentModeKHR present_mode;
   bool fifo_ready;

   struct wsi_wl_image images[0];
};

static struct wsi_image *
wsi_wl_swapchain_get_wsi_image(struct wsi_swapchain *wsi_chain,
                               uint32_t image_index)
{
   struct wsi_wl_swapchain *chain = (struct wsi_wl_swapchain *)wsi_chain;
   return &chain->images[image_index].base;
}

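/* Acquisition works by polling each image's 'busy' flag: the flag is set when
 * the image is presented and cleared by the wl_buffer release event.  We keep
 * dispatching the display's queue until a free image turns up; note that the
 * timeout parameter is not honored by this implementation.
 */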
static VkResult
wsi_wl_swapchain_acquire_next_image(struct wsi_swapchain *wsi_chain,
                                    uint64_t timeout,
                                    VkSemaphore semaphore,
                                    uint32_t *image_index)
{
   struct wsi_wl_swapchain *chain = (struct wsi_wl_swapchain *)wsi_chain;

   int ret = wl_display_dispatch_queue_pending(chain->display->wl_display,
                                               chain->display->queue);
   /* XXX: I'm not sure if out-of-date is the right error here. If
    * wl_display_dispatch_queue_pending fails it most likely means we got
    * kicked by the server so this seems more-or-less correct.
    */
   if (ret < 0)
      return VK_ERROR_OUT_OF_DATE_KHR;

   while (1) {
      for (uint32_t i = 0; i < chain->base.image_count; i++) {
         if (!chain->images[i].busy) {
            /* We found a non-busy image */
            *image_index = i;
            chain->images[i].busy = true;
            return VK_SUCCESS;
         }
      }

      /* This time we do a blocking dispatch because we can't go
       * anywhere until we get an event.
       */
      int ret = wl_display_roundtrip_queue(chain->display->wl_display,
                                           chain->display->queue);
      if (ret < 0)
         return VK_ERROR_OUT_OF_DATE_KHR;
   }
}

static void
frame_handle_done(void *data, struct wl_callback *callback, uint32_t serial)
{
   struct wsi_wl_swapchain *chain = data;

   chain->frame = NULL;
   chain->fifo_ready = true;

   wl_callback_destroy(callback);
}

static const struct wl_callback_listener frame_listener = {
   frame_handle_done,
};

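/* Present by attaching the image's wl_buffer, posting damage and committing
 * the surface.  FIFO mode is emulated with wl_surface frame callbacks: a new
 * frame isn't queued until the previous frame callback has fired, which
 * throttles the application to the compositor's repaint rate.
 */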
static VkResult
wsi_wl_swapchain_queue_present(struct wsi_swapchain *wsi_chain,
                               uint32_t image_index,
                               const VkPresentRegionKHR *damage)
{
   struct wsi_wl_swapchain *chain = (struct wsi_wl_swapchain *)wsi_chain;

   if (chain->base.present_mode == VK_PRESENT_MODE_FIFO_KHR) {
      while (!chain->fifo_ready) {
         int ret = wl_display_dispatch_queue(chain->display->wl_display,
                                             chain->display->queue);
         if (ret < 0)
            return VK_ERROR_OUT_OF_DATE_KHR;
      }
   }

   assert(image_index < chain->base.image_count);
   wl_surface_attach(chain->surface, chain->images[image_index].buffer, 0, 0);

   if (chain->surface_version >= 4 && damage &&
       damage->pRectangles && damage->rectangleCount > 0) {
      for (unsigned i = 0; i < damage->rectangleCount; i++) {
         const VkRectLayerKHR *rect = &damage->pRectangles[i];
         assert(rect->layer == 0);
         wl_surface_damage_buffer(chain->surface,
                                  rect->offset.x, rect->offset.y,
                                  rect->extent.width, rect->extent.height);
      }
   } else {
      wl_surface_damage(chain->surface, 0, 0, INT32_MAX, INT32_MAX);
   }

   if (chain->base.present_mode == VK_PRESENT_MODE_FIFO_KHR) {
      chain->frame = wl_surface_frame(chain->surface);
      wl_callback_add_listener(chain->frame, &frame_listener, chain);
      chain->fifo_ready = false;
   }

   chain->images[image_index].busy = true;
   wl_surface_commit(chain->surface);
   wl_display_flush(chain->display->wl_display);

   return VK_SUCCESS;
}

static void
buffer_handle_release(void *data, struct wl_buffer *buffer)
{
   struct wsi_wl_image *image = data;

   assert(image->buffer == buffer);

   image->busy = false;
}

static const struct wl_buffer_listener buffer_listener = {
   buffer_handle_release,
};

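/* Create the native image and export it to the compositor as a wl_buffer via
 * wl_drm.create_prime_buffer.  Once the buffer has been created, our copy of
 * the dma-buf fd is no longer needed and is closed.
 */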
static VkResult
wsi_wl_image_init(struct wsi_wl_swapchain *chain,
                  struct wsi_wl_image *image,
                  const VkSwapchainCreateInfoKHR *pCreateInfo,
                  const VkAllocationCallbacks *pAllocator)
{
   VkResult result;

   result = wsi_create_native_image(&chain->base, pCreateInfo, &image->base);
   if (result != VK_SUCCESS)
      return result;

   image->buffer = wl_drm_create_prime_buffer(chain->drm_wrapper,
                                              image->base.fd, /* name */
                                              chain->extent.width,
                                              chain->extent.height,
                                              chain->drm_format,
                                              image->base.offset,
                                              image->base.row_pitch,
                                              0, 0, 0, 0 /* unused */);
   close(image->base.fd);

   if (!image->buffer) {
      result = VK_ERROR_OUT_OF_HOST_MEMORY;
      goto fail_image;
   }

   wl_buffer_add_listener(image->buffer, &buffer_listener, image);

   return VK_SUCCESS;

fail_image:
   wsi_destroy_image(&chain->base, &image->base);

   return result;
}

static VkResult
wsi_wl_swapchain_destroy(struct wsi_swapchain *wsi_chain,
                         const VkAllocationCallbacks *pAllocator)
{
   struct wsi_wl_swapchain *chain = (struct wsi_wl_swapchain *)wsi_chain;

   for (uint32_t i = 0; i < chain->base.image_count; i++) {
      if (chain->images[i].buffer) {
         wl_buffer_destroy(chain->images[i].buffer);
         wsi_destroy_image(&chain->base, &chain->images[i].base);
      }
   }

   if (chain->frame)
      wl_callback_destroy(chain->frame);
   if (chain->surface)
      wl_proxy_wrapper_destroy(chain->surface);
   if (chain->drm_wrapper)
      wl_proxy_wrapper_destroy(chain->drm_wrapper);

   if (chain->display)
      wsi_wl_display_unref(chain->display);

   wsi_swapchain_finish(&chain->base);

   vk_free(pAllocator, chain);

   return VK_SUCCESS;
}

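/* Create a Wayland swapchain.  The display state is shared with (and
 * reference-counted from) oldSwapchain when one is provided; otherwise a
 * fresh wsi_wl_display is created.  The wl_surface and wl_drm proxies are
 * then wrapped onto the private queue and the per-image buffers allocated.
 */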
static VkResult
wsi_wl_surface_create_swapchain(VkIcdSurfaceBase *icd_surface,
                                VkDevice device,
                                struct wsi_device *wsi_device,
                                int local_fd,
                                const VkSwapchainCreateInfoKHR *pCreateInfo,
                                const VkAllocationCallbacks *pAllocator,
                                struct wsi_swapchain **swapchain_out)
{
   VkIcdSurfaceWayland *surface = (VkIcdSurfaceWayland *)icd_surface;
   struct wsi_wayland *wsi =
      (struct wsi_wayland *)wsi_device->wsi[VK_ICD_WSI_PLATFORM_WAYLAND];
   struct wsi_wl_swapchain *chain;
   VkResult result;

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR);

   int num_images = pCreateInfo->minImageCount;

   size_t size = sizeof(*chain) + num_images * sizeof(chain->images[0]);
   chain = vk_alloc(pAllocator, size, 8,
                    VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (chain == NULL)
      return VK_ERROR_OUT_OF_HOST_MEMORY;

   result = wsi_swapchain_init(wsi_device, &chain->base, device,
                               pCreateInfo, pAllocator);
   if (result != VK_SUCCESS) {
      vk_free(pAllocator, chain);
      return result;
   }

   /* Mark a bunch of stuff as NULL. This way we can just call
    * destroy_swapchain for cleanup.
    */
   for (uint32_t i = 0; i < num_images; i++)
      chain->images[i].buffer = NULL;
   chain->surface = NULL;
   chain->drm_wrapper = NULL;
   chain->frame = NULL;

   bool alpha = pCreateInfo->compositeAlpha ==
                VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR;

   chain->base.destroy = wsi_wl_swapchain_destroy;
   chain->base.get_wsi_image = wsi_wl_swapchain_get_wsi_image;
   chain->base.acquire_next_image = wsi_wl_swapchain_acquire_next_image;
   chain->base.queue_present = wsi_wl_swapchain_queue_present;
   chain->base.present_mode = pCreateInfo->presentMode;
   chain->base.image_count = num_images;
   chain->extent = pCreateInfo->imageExtent;
   chain->vk_format = pCreateInfo->imageFormat;
   chain->drm_format = wl_drm_format_for_vk_format(chain->vk_format, alpha);

   if (pCreateInfo->oldSwapchain) {
      /* If we have an oldSwapchain parameter, copy the display struct over
       * from the old one so we don't have to fully re-initialize it.
       */
      struct wsi_wl_swapchain *old_chain = (void *)pCreateInfo->oldSwapchain;
      chain->display = wsi_wl_display_ref(old_chain->display);
   } else {
      chain->display = NULL;
      result = wsi_wl_display_create(wsi, surface->display, &chain->display);
      if (result != VK_SUCCESS)
         goto fail;
   }

   chain->surface = wl_proxy_create_wrapper(surface->surface);
   if (!chain->surface) {
      result = VK_ERROR_OUT_OF_HOST_MEMORY;
      goto fail;
   }
   wl_proxy_set_queue((struct wl_proxy *) chain->surface,
                      chain->display->queue);
   chain->surface_version = wl_proxy_get_version((void *)surface->surface);

   chain->drm_wrapper = wl_proxy_create_wrapper(chain->display->drm);
   if (!chain->drm_wrapper) {
      result = VK_ERROR_OUT_OF_HOST_MEMORY;
      goto fail;
   }
   wl_proxy_set_queue((struct wl_proxy *) chain->drm_wrapper,
                      chain->display->queue);

   chain->fifo_ready = true;

   for (uint32_t i = 0; i < chain->base.image_count; i++) {
      result = wsi_wl_image_init(chain, &chain->images[i],
                                 pCreateInfo, pAllocator);
      if (result != VK_SUCCESS)
         goto fail;
      chain->images[i].busy = false;
   }

   *swapchain_out = &chain->base;

   return VK_SUCCESS;

fail:
   wsi_wl_swapchain_destroy(&chain->base, pAllocator);

   return result;
}

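/* Instance-level entry points: allocate the Wayland WSI interface, wire up
 * its function table and register it with the wsi_device.
 */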
VkResult
wsi_wl_init_wsi(struct wsi_device *wsi_device,
                const VkAllocationCallbacks *alloc,
                VkPhysicalDevice physical_device)
{
   struct wsi_wayland *wsi;
   VkResult result;

   wsi = vk_alloc(alloc, sizeof(*wsi), 8,
                  VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
   if (!wsi) {
      result = VK_ERROR_OUT_OF_HOST_MEMORY;
      goto fail;
   }

   wsi->physical_device = physical_device;
   wsi->alloc = alloc;
   wsi->wsi = wsi_device;

   wsi->base.get_support = wsi_wl_surface_get_support;
   wsi->base.get_capabilities = wsi_wl_surface_get_capabilities;
   wsi->base.get_capabilities2 = wsi_wl_surface_get_capabilities2;
   wsi->base.get_formats = wsi_wl_surface_get_formats;
   wsi->base.get_formats2 = wsi_wl_surface_get_formats2;
   wsi->base.get_present_modes = wsi_wl_surface_get_present_modes;
   wsi->base.create_swapchain = wsi_wl_surface_create_swapchain;

   wsi_device->wsi[VK_ICD_WSI_PLATFORM_WAYLAND] = &wsi->base;

   return VK_SUCCESS;

fail:
   wsi_device->wsi[VK_ICD_WSI_PLATFORM_WAYLAND] = NULL;

   return result;
}

void
wsi_wl_finish_wsi(struct wsi_device *wsi_device,
                  const VkAllocationCallbacks *alloc)
{
   struct wsi_wayland *wsi =
      (struct wsi_wayland *)wsi_device->wsi[VK_ICD_WSI_PLATFORM_WAYLAND];
   if (!wsi)
      return;

   vk_free(alloc, wsi);
}