/*
 * Copyright © 2015 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */

#include <wayland-client.h>

#include <assert.h>
#include <stdlib.h>
#include <stdio.h>
#include <unistd.h>
#include <errno.h>
#include <string.h>
#include <pthread.h>
#include <poll.h>
#include <sys/mman.h>

#include "drm-uapi/drm_fourcc.h"

#include "vk_instance.h"
#include "vk_physical_device.h"
#include "vk_util.h"
#include "wsi_common_entrypoints.h"
#include "wsi_common_private.h"
#include "linux-dmabuf-unstable-v1-client-protocol.h"

#include <util/compiler.h>
#include <util/hash_table.h>
#include <util/timespec.h>
#include <util/u_vector.h>
#include <util/anon_file.h>

struct wsi_wayland;

struct wsi_wl_format {
   VkFormat vk_format;
   uint32_t has_alpha_format;
   uint32_t has_opaque_format;
   struct u_vector modifiers;
};

struct wsi_wl_display {
   /* The real wl_display */
   struct wl_display *wl_display;
   /* Actually a proxy wrapper around the event queue */
   struct wl_display *wl_display_wrapper;
   struct wl_event_queue *queue;

   struct wl_shm *wl_shm;
   struct zwp_linux_dmabuf_v1 *wl_dmabuf;

   struct wsi_wayland *wsi_wl;

   /* Formats populated by zwp_linux_dmabuf_v1 or wl_shm interfaces */
   struct u_vector formats;

   /* Only used for displays created by wsi_wl_display_create */
   uint32_t refcount;

   bool sw;
};

struct wsi_wayland {
   struct wsi_interface base;

   struct wsi_device *wsi;

   const VkAllocationCallbacks *alloc;
   VkPhysicalDevice physical_device;
};

static struct wsi_wl_format *
find_format(struct u_vector *formats, VkFormat format)
{
   struct wsi_wl_format *f;

   u_vector_foreach(f, formats)
      if (f->vk_format == format)
         return f;

   return NULL;
}

static struct wsi_wl_format *
wsi_wl_display_add_vk_format(struct wsi_wl_display *display,
                             struct u_vector *formats,
                             VkFormat format,
                             bool has_alpha_format,
                             bool has_opaque_format)
{
   /* Don't add a format that's already in the list */
   struct wsi_wl_format *f = find_format(formats, format);
   if (f) {
      if (has_alpha_format)
         f->has_alpha_format = true;
      if (has_opaque_format)
         f->has_opaque_format = true;
      return f;
   }

   /* Don't add formats that aren't renderable. */
   VkFormatProperties props;

   display->wsi_wl->wsi->GetPhysicalDeviceFormatProperties(display->wsi_wl->physical_device,
                                                           format, &props);
   if (!(props.optimalTilingFeatures & VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT))
      return NULL;

   struct u_vector modifiers;
   if (!u_vector_init_pow2(&modifiers, 4, sizeof(uint64_t)))
      return NULL;

   f = u_vector_add(formats);
   if (!f) {
      u_vector_finish(&modifiers);
      return NULL;
   }

   f->vk_format = format;
   f->has_alpha_format = has_alpha_format;
   f->has_opaque_format = has_opaque_format;
   f->modifiers = modifiers;

   return f;
}

static void
wsi_wl_format_add_modifier(struct wsi_wl_format *format, uint64_t modifier)
{
   uint64_t *mod;

   if (modifier == DRM_FORMAT_MOD_INVALID)
      return;

   u_vector_foreach(mod, &format->modifiers)
      if (*mod == modifier)
         return;

   mod = u_vector_add(&format->modifiers);
   if (mod)
      *mod = modifier;
}

static void
wsi_wl_display_add_drm_format_modifier(struct wsi_wl_display *display,
                                       struct u_vector *formats,
                                       uint32_t drm_format, uint64_t modifier)
{
   struct wsi_wl_format *format = NULL, *srgb_format = NULL;

   switch (drm_format) {
#if 0
   /* TODO: These are only available when VK_EXT_4444_formats is enabled, so
    * we probably need to make their use conditional on this extension. */
   case DRM_FORMAT_ARGB4444:
      format = wsi_wl_display_add_vk_format(display, formats,
                                            VK_FORMAT_A4R4G4B4_UNORM_PACK16_EXT,
                                            true, false);
      break;
   case DRM_FORMAT_XRGB4444:
      format = wsi_wl_display_add_vk_format(display, formats,
                                            VK_FORMAT_A4R4G4B4_UNORM_PACK16_EXT,
                                            false, true);
      break;
   case DRM_FORMAT_ABGR4444:
      format = wsi_wl_display_add_vk_format(display, formats,
                                            VK_FORMAT_A4B4G4R4_UNORM_PACK16_EXT,
                                            true, false);
      break;
   case DRM_FORMAT_XBGR4444:
      format = wsi_wl_display_add_vk_format(display, formats,
                                            VK_FORMAT_A4B4G4R4_UNORM_PACK16_EXT,
                                            false, true);
      break;
#endif

   /* Vulkan _PACKN formats have the same component order as DRM formats
    * on little endian systems, on big endian there exists no analog. */
#if MESA_LITTLE_ENDIAN
   case DRM_FORMAT_RGBA4444:
      format = wsi_wl_display_add_vk_format(display, formats,
                                            VK_FORMAT_R4G4B4A4_UNORM_PACK16,
                                            true, false);
      break;
   case DRM_FORMAT_RGBX4444:
      format = wsi_wl_display_add_vk_format(display, formats,
                                            VK_FORMAT_R4G4B4A4_UNORM_PACK16,
                                            false, true);
      break;
   case DRM_FORMAT_BGRA4444:
      format = wsi_wl_display_add_vk_format(display, formats,
                                            VK_FORMAT_B4G4R4A4_UNORM_PACK16,
                                            true, false);
      break;
   case DRM_FORMAT_BGRX4444:
      format = wsi_wl_display_add_vk_format(display, formats,
                                            VK_FORMAT_B4G4R4A4_UNORM_PACK16,
                                            false, true);
      break;
   case DRM_FORMAT_RGB565:
      format = wsi_wl_display_add_vk_format(display, formats,
                                            VK_FORMAT_R5G6B5_UNORM_PACK16,
                                            true, true);
      break;
   case DRM_FORMAT_BGR565:
      format = wsi_wl_display_add_vk_format(display, formats,
                                            VK_FORMAT_B5G6R5_UNORM_PACK16,
                                            true, true);
      break;
   case DRM_FORMAT_ARGB1555:
      format = wsi_wl_display_add_vk_format(display, formats,
                                            VK_FORMAT_A1R5G5B5_UNORM_PACK16,
                                            true, false);
      break;
   case DRM_FORMAT_XRGB1555:
      format = wsi_wl_display_add_vk_format(display, formats,
                                            VK_FORMAT_A1R5G5B5_UNORM_PACK16,
                                            false, true);
      break;
   case DRM_FORMAT_RGBA5551:
      format = wsi_wl_display_add_vk_format(display, formats,
                                            VK_FORMAT_R5G5B5A1_UNORM_PACK16,
                                            true, false);
      break;
   case DRM_FORMAT_RGBX5551:
      format = wsi_wl_display_add_vk_format(display, formats,
                                            VK_FORMAT_R5G5B5A1_UNORM_PACK16,
                                            false, true);
      break;
   case DRM_FORMAT_BGRA5551:
      format = wsi_wl_display_add_vk_format(display, formats,
                                            VK_FORMAT_B5G5R5A1_UNORM_PACK16,
                                            true, false);
      break;
   case DRM_FORMAT_BGRX5551:
      format = wsi_wl_display_add_vk_format(display, formats,
                                            VK_FORMAT_B5G5R5A1_UNORM_PACK16,
                                            false, true);
      break;
   case DRM_FORMAT_ARGB2101010:
      format = wsi_wl_display_add_vk_format(display, formats,
                                            VK_FORMAT_A2R10G10B10_UNORM_PACK32,
                                            true, false);
      break;
   case DRM_FORMAT_XRGB2101010:
      format = wsi_wl_display_add_vk_format(display, formats,
                                            VK_FORMAT_A2R10G10B10_UNORM_PACK32,
                                            false, true);
      break;
   case DRM_FORMAT_ABGR2101010:
      format = wsi_wl_display_add_vk_format(display, formats,
                                            VK_FORMAT_A2B10G10R10_UNORM_PACK32,
                                            true, false);
      break;
   case DRM_FORMAT_XBGR2101010:
      format = wsi_wl_display_add_vk_format(display, formats,
                                            VK_FORMAT_A2B10G10R10_UNORM_PACK32,
                                            false, true);
      break;
#endif

   /* Non-packed 8-bit formats have an inverted channel order compared to the
    * little endian DRM formats, because the DRM channel ordering is high->low
    * but the Vulkan channel ordering is in memory byte order.
    *
    * For all UNORM formats which have a SRGB variant, we must support both if
    * we can. SRGB in this context means that rendering to it will result in a
    * linear -> nonlinear SRGB colorspace conversion before the data is stored.
    * The inverse function is applied when sampling from SRGB images.
    * From Wayland's perspective nothing changes, the difference is just how
    * Vulkan interprets the pixel data. */
   case DRM_FORMAT_XBGR8888:
      srgb_format = wsi_wl_display_add_vk_format(display, formats,
                                                 VK_FORMAT_R8G8B8_SRGB,
                                                 true, true);
      format = wsi_wl_display_add_vk_format(display, formats,
                                            VK_FORMAT_R8G8B8_UNORM,
                                            true, true);
      FALLTHROUGH;
   case DRM_FORMAT_ABGR8888:
      srgb_format = wsi_wl_display_add_vk_format(display, formats,
                                                 VK_FORMAT_R8G8B8A8_SRGB,
                                                 true, true);
      format = wsi_wl_display_add_vk_format(display, formats,
                                            VK_FORMAT_R8G8B8A8_UNORM,
                                            true, true);
      break;
   case DRM_FORMAT_XRGB8888:
      srgb_format = wsi_wl_display_add_vk_format(display, formats,
                                                 VK_FORMAT_B8G8R8_SRGB,
                                                 true, true);
      format = wsi_wl_display_add_vk_format(display, formats,
                                            VK_FORMAT_B8G8R8_UNORM,
                                            true, true);
      FALLTHROUGH;
   case DRM_FORMAT_ARGB8888:
      srgb_format = wsi_wl_display_add_vk_format(display, formats,
                                                 VK_FORMAT_B8G8R8A8_SRGB,
                                                 true, true);
      format = wsi_wl_display_add_vk_format(display, formats,
                                            VK_FORMAT_B8G8R8A8_UNORM,
                                            true, true);
      break;
   }

   if (format)
      wsi_wl_format_add_modifier(format, modifier);
   if (srgb_format)
      wsi_wl_format_add_modifier(srgb_format, modifier);
}

static void
wsi_wl_display_add_wl_shm_format(struct wsi_wl_display *display,
                                 struct u_vector *formats,
                                 uint32_t wl_shm_format)
{
   switch (wl_shm_format) {
   case WL_SHM_FORMAT_XBGR8888:
      wsi_wl_display_add_vk_format(display, formats,
                                   VK_FORMAT_R8G8B8_SRGB,
                                   false, true);
      wsi_wl_display_add_vk_format(display, formats,
                                   VK_FORMAT_R8G8B8_UNORM,
                                   false, true);
      FALLTHROUGH;
   case WL_SHM_FORMAT_ABGR8888:
      wsi_wl_display_add_vk_format(display, formats,
                                   VK_FORMAT_R8G8B8A8_SRGB,
                                   true, false);
      wsi_wl_display_add_vk_format(display, formats,
                                   VK_FORMAT_R8G8B8A8_UNORM,
                                   true, false);
      break;
   case WL_SHM_FORMAT_XRGB8888:
      wsi_wl_display_add_vk_format(display, formats,
                                   VK_FORMAT_B8G8R8_SRGB,
                                   false, true);
      wsi_wl_display_add_vk_format(display, formats,
                                   VK_FORMAT_B8G8R8_UNORM,
                                   false, true);
      FALLTHROUGH;
   case WL_SHM_FORMAT_ARGB8888:
      wsi_wl_display_add_vk_format(display, formats,
                                   VK_FORMAT_B8G8R8A8_SRGB,
                                   true, false);
      wsi_wl_display_add_vk_format(display, formats,
                                   VK_FORMAT_B8G8R8A8_UNORM,
                                   true, false);
      break;
   }
}

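/* Map a Vulkan format (and the requested alpha behaviour) back to the DRM
 * FourCC code it was advertised under; this is the inverse of the mapping in
 * wsi_wl_display_add_drm_format_modifier() above.
 */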
static uint32_t
wl_drm_format_for_vk_format(VkFormat vk_format, bool alpha)
{
   switch (vk_format) {
#if 0
   case VK_FORMAT_A4R4G4B4_UNORM_PACK16_EXT:
      return alpha ? DRM_FORMAT_ARGB4444 : DRM_FORMAT_XRGB4444;
   case VK_FORMAT_A4B4G4R4_UNORM_PACK16_EXT:
      return alpha ? DRM_FORMAT_ABGR4444 : DRM_FORMAT_XBGR4444;
#endif
#if MESA_LITTLE_ENDIAN
   case VK_FORMAT_R4G4B4A4_UNORM_PACK16:
      return alpha ? DRM_FORMAT_RGBA4444 : DRM_FORMAT_RGBX4444;
   case VK_FORMAT_B4G4R4A4_UNORM_PACK16:
      return alpha ? DRM_FORMAT_BGRA4444 : DRM_FORMAT_BGRX4444;
   case VK_FORMAT_R5G6B5_UNORM_PACK16:
      return DRM_FORMAT_RGB565;
   case VK_FORMAT_B5G6R5_UNORM_PACK16:
      return DRM_FORMAT_BGR565;
   case VK_FORMAT_A1R5G5B5_UNORM_PACK16:
      return alpha ? DRM_FORMAT_ARGB1555 : DRM_FORMAT_XRGB1555;
   case VK_FORMAT_R5G5B5A1_UNORM_PACK16:
      return alpha ? DRM_FORMAT_RGBA5551 : DRM_FORMAT_RGBX5551;
   case VK_FORMAT_B5G5R5A1_UNORM_PACK16:
      return alpha ? DRM_FORMAT_BGRA5551 : DRM_FORMAT_BGRX5551;
   case VK_FORMAT_A2R10G10B10_UNORM_PACK32:
      return alpha ? DRM_FORMAT_ARGB2101010 : DRM_FORMAT_XRGB2101010;
   case VK_FORMAT_A2B10G10R10_UNORM_PACK32:
      return alpha ? DRM_FORMAT_ABGR2101010 : DRM_FORMAT_XBGR2101010;
#endif
   case VK_FORMAT_R8G8B8_UNORM:
   case VK_FORMAT_R8G8B8_SRGB:
      return DRM_FORMAT_XBGR8888;
   case VK_FORMAT_R8G8B8A8_UNORM:
   case VK_FORMAT_R8G8B8A8_SRGB:
      return alpha ? DRM_FORMAT_ABGR8888 : DRM_FORMAT_XBGR8888;
   case VK_FORMAT_B8G8R8_UNORM:
   case VK_FORMAT_B8G8R8_SRGB:
      return DRM_FORMAT_BGRX8888;
   case VK_FORMAT_B8G8R8A8_UNORM:
   case VK_FORMAT_B8G8R8A8_SRGB:
      return alpha ? DRM_FORMAT_ARGB8888 : DRM_FORMAT_XRGB8888;

   default:
      assert(!"Unsupported Vulkan format");
      return 0;
   }
}

static uint32_t
wl_shm_format_for_vk_format(VkFormat vk_format, bool alpha)
{
   switch (vk_format) {
   case VK_FORMAT_R8G8B8A8_UNORM:
   case VK_FORMAT_R8G8B8A8_SRGB:
      return alpha ? WL_SHM_FORMAT_ABGR8888 : WL_SHM_FORMAT_XBGR8888;
   case VK_FORMAT_B8G8R8A8_UNORM:
   case VK_FORMAT_B8G8R8A8_SRGB:
      return alpha ? WL_SHM_FORMAT_ARGB8888 : WL_SHM_FORMAT_XRGB8888;

   default:
      assert(!"Unsupported Vulkan format");
      return 0;
   }
}

static void
dmabuf_handle_format(void *data, struct zwp_linux_dmabuf_v1 *dmabuf,
                     uint32_t format)
{
   /* Formats are implicitly advertised by the modifier event, so we ignore
    * them here. */
}

static void
dmabuf_handle_modifier(void *data, struct zwp_linux_dmabuf_v1 *dmabuf,
                       uint32_t format, uint32_t modifier_hi,
                       uint32_t modifier_lo)
{
   struct wsi_wl_display *display = data;
   uint64_t modifier;

   modifier = ((uint64_t) modifier_hi << 32) | modifier_lo;
   wsi_wl_display_add_drm_format_modifier(display, &display->formats,
                                          format, modifier);
}

static const struct zwp_linux_dmabuf_v1_listener dmabuf_listener = {
   dmabuf_handle_format,
   dmabuf_handle_modifier,
};

static void
shm_handle_format(void *data, struct wl_shm *shm, uint32_t format)
{
   struct wsi_wl_display *display = data;

   wsi_wl_display_add_wl_shm_format(display, &display->formats, format);
}

static const struct wl_shm_listener shm_listener = {
   .format = shm_handle_format
};

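/* Registry listener: in software (wl_shm) mode we only bind wl_shm; otherwise
 * we bind zwp_linux_dmabuf_v1 at version 3 (the version that adds the
 * modifier event) and ignore all other globals.
 */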
static void
registry_handle_global(void *data, struct wl_registry *registry,
                       uint32_t name, const char *interface, uint32_t version)
{
   struct wsi_wl_display *display = data;

   if (display->sw) {
      if (strcmp(interface, "wl_shm") == 0) {
         display->wl_shm = wl_registry_bind(registry, name, &wl_shm_interface, 1);
         wl_shm_add_listener(display->wl_shm, &shm_listener, display);
      }
      return;
   }

   if (strcmp(interface, "zwp_linux_dmabuf_v1") == 0 && version >= 3) {
      display->wl_dmabuf =
         wl_registry_bind(registry, name, &zwp_linux_dmabuf_v1_interface, 3);
      zwp_linux_dmabuf_v1_add_listener(display->wl_dmabuf,
                                       &dmabuf_listener, display);
   }
}

static void
registry_handle_global_remove(void *data, struct wl_registry *registry,
                              uint32_t name)
{ /* No-op */ }

static const struct wl_registry_listener registry_listener = {
   registry_handle_global,
   registry_handle_global_remove
};

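/* Release everything owned by a wsi_wl_display: the per-format modifier
 * vectors, the format vector itself, and any Wayland objects created on our
 * private event queue. The wl_display itself belongs to the application and
 * is never destroyed here.
 */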
static void
wsi_wl_display_finish(struct wsi_wl_display *display)
{
   assert(display->refcount == 0);

   struct wsi_wl_format *f;
   u_vector_foreach(f, &display->formats)
      u_vector_finish(&f->modifiers);
   u_vector_finish(&display->formats);
   if (display->wl_shm)
      wl_shm_destroy(display->wl_shm);
   if (display->wl_dmabuf)
      zwp_linux_dmabuf_v1_destroy(display->wl_dmabuf);
   if (display->wl_display_wrapper)
      wl_proxy_wrapper_destroy(display->wl_display_wrapper);
   if (display->queue)
      wl_event_queue_destroy(display->queue);
}

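/* Initialize a wsi_wl_display against the application's wl_display. All of
 * our Wayland objects live on a private event queue so we never steal or
 * block on events belonging to the application. The first roundtrip fetches
 * the wl_shm / zwp_linux_dmabuf_v1 globals; if get_format_list is set, a
 * second roundtrip collects the advertised formats and modifiers.
 */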
static VkResult
wsi_wl_display_init(struct wsi_wayland *wsi_wl,
                    struct wsi_wl_display *display,
                    struct wl_display *wl_display,
                    bool get_format_list, bool sw)
{
   VkResult result = VK_SUCCESS;
   memset(display, 0, sizeof(*display));

   if (!u_vector_init(&display->formats, 8, sizeof(struct wsi_wl_format)))
      return VK_ERROR_OUT_OF_HOST_MEMORY;

   display->wsi_wl = wsi_wl;
   display->wl_display = wl_display;
   display->sw = sw;

   display->queue = wl_display_create_queue(wl_display);
   if (!display->queue) {
      result = VK_ERROR_OUT_OF_HOST_MEMORY;
      goto fail;
   }

   display->wl_display_wrapper = wl_proxy_create_wrapper(wl_display);
   if (!display->wl_display_wrapper) {
      result = VK_ERROR_OUT_OF_HOST_MEMORY;
      goto fail;
   }

   wl_proxy_set_queue((struct wl_proxy *) display->wl_display_wrapper,
                      display->queue);

   struct wl_registry *registry =
      wl_display_get_registry(display->wl_display_wrapper);
   if (!registry) {
      result = VK_ERROR_OUT_OF_HOST_MEMORY;
      goto fail;
   }

   wl_registry_add_listener(registry, &registry_listener, display);

   /* Round-trip to get wl_shm and zwp_linux_dmabuf_v1 globals */
   wl_display_roundtrip_queue(display->wl_display, display->queue);
   if (!display->wl_dmabuf && !display->wl_shm) {
      result = VK_ERROR_SURFACE_LOST_KHR;
      goto fail_registry;
   }

   /* Caller doesn't expect us to query formats/modifiers, so return */
   if (!get_format_list)
      goto out;

   /* Round-trip again to get formats and modifiers */
   wl_display_roundtrip_queue(display->wl_display, display->queue);

   if (wsi_wl->wsi->force_bgra8_unorm_first) {
      /* Find BGRA8_UNORM in the list and swap it to the first position if we
       * can find it. Some apps get confused if SRGB is first in the list.
       */
      struct wsi_wl_format *first_fmt = u_vector_head(&display->formats);
      struct wsi_wl_format *f, tmp_fmt;
      f = find_format(&display->formats, VK_FORMAT_B8G8R8A8_UNORM);
      if (f) {
         tmp_fmt = *f;
         *f = *first_fmt;
         *first_fmt = tmp_fmt;
      }
   }

out:
   /* We don't need this anymore */
   wl_registry_destroy(registry);

   display->refcount = 0;

   return VK_SUCCESS;

fail_registry:
   if (registry)
      wl_registry_destroy(registry);

fail:
   wsi_wl_display_finish(display);
   return result;
}

static VkResult
wsi_wl_display_create(struct wsi_wayland *wsi, struct wl_display *wl_display,
                      bool sw,
                      struct wsi_wl_display **display_out)
{
   struct wsi_wl_display *display =
      vk_alloc(wsi->alloc, sizeof(*display), 8,
               VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
   if (!display)
      return VK_ERROR_OUT_OF_HOST_MEMORY;

   VkResult result = wsi_wl_display_init(wsi, display, wl_display, true,
                                         sw);
   if (result != VK_SUCCESS) {
      vk_free(wsi->alloc, display);
      return result;
   }

   display->refcount++;
   *display_out = display;

   return result;
}

static struct wsi_wl_display *
wsi_wl_display_ref(struct wsi_wl_display *display)
{
   display->refcount++;
   return display;
}

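/* Drop a reference taken by wsi_wl_display_create() or wsi_wl_display_ref().
 * The display is finished and freed when the last reference goes away.
 * Displays initialized directly on the stack keep refcount == 0 and are torn
 * down with wsi_wl_display_finish() instead.
 */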
static void
wsi_wl_display_unref(struct wsi_wl_display *display)
{
   if (display->refcount-- > 1)
      return;

   struct wsi_wayland *wsi = display->wsi_wl;
   wsi_wl_display_finish(display);
   vk_free(wsi->alloc, display);
}

VKAPI_ATTR VkBool32 VKAPI_CALL
wsi_GetPhysicalDeviceWaylandPresentationSupportKHR(VkPhysicalDevice physicalDevice,
                                                   uint32_t queueFamilyIndex,
                                                   struct wl_display *wl_display)
{
   VK_FROM_HANDLE(vk_physical_device, pdevice, physicalDevice);
   struct wsi_device *wsi_device = pdevice->wsi_device;
   struct wsi_wayland *wsi =
      (struct wsi_wayland *)wsi_device->wsi[VK_ICD_WSI_PLATFORM_WAYLAND];

   struct wsi_wl_display display;
   VkResult ret = wsi_wl_display_init(wsi, &display, wl_display, false,
                                      wsi_device->sw);
   if (ret == VK_SUCCESS)
      wsi_wl_display_finish(&display);

   return ret == VK_SUCCESS;
}

static VkResult
wsi_wl_surface_get_support(VkIcdSurfaceBase *surface,
                           struct wsi_device *wsi_device,
                           uint32_t queueFamilyIndex,
                           VkBool32* pSupported)
{
   *pSupported = true;

   return VK_SUCCESS;
}

static const VkPresentModeKHR present_modes[] = {
   VK_PRESENT_MODE_MAILBOX_KHR,
   VK_PRESENT_MODE_FIFO_KHR,
};

static VkResult
wsi_wl_surface_get_capabilities(VkIcdSurfaceBase *surface,
                                struct wsi_device *wsi_device,
                                VkSurfaceCapabilitiesKHR* caps)
{
   /* For true mailbox mode, we need at least 4 images:
    *  1) One to scan out from
    *  2) One to have queued for scan-out
    *  3) One to be currently held by the Wayland compositor
    *  4) One to render to
    */
   caps->minImageCount = 4;
   /* There is no real maximum */
   caps->maxImageCount = 0;

   caps->currentExtent = (VkExtent2D) { UINT32_MAX, UINT32_MAX };
   caps->minImageExtent = (VkExtent2D) { 1, 1 };
   caps->maxImageExtent = (VkExtent2D) {
      wsi_device->maxImageDimension2D,
      wsi_device->maxImageDimension2D,
   };

   caps->supportedTransforms = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR;
   caps->currentTransform = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR;
   caps->maxImageArrayLayers = 1;

   caps->supportedCompositeAlpha =
      VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR |
      VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR;

   caps->supportedUsageFlags =
      VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
      VK_IMAGE_USAGE_SAMPLED_BIT |
      VK_IMAGE_USAGE_TRANSFER_DST_BIT |
      VK_IMAGE_USAGE_STORAGE_BIT |
      VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;

   return VK_SUCCESS;
}

static VkResult
wsi_wl_surface_get_capabilities2(VkIcdSurfaceBase *surface,
                                 struct wsi_device *wsi_device,
                                 const void *info_next,
                                 VkSurfaceCapabilities2KHR* caps)
{
   assert(caps->sType == VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR);

   VkResult result =
      wsi_wl_surface_get_capabilities(surface, wsi_device,
                                      &caps->surfaceCapabilities);

   vk_foreach_struct(ext, caps->pNext) {
      switch (ext->sType) {
      case VK_STRUCTURE_TYPE_SURFACE_PROTECTED_CAPABILITIES_KHR: {
         VkSurfaceProtectedCapabilitiesKHR *protected = (void *)ext;
         protected->supportsProtected = VK_FALSE;
         break;
      }

      default:
         /* Ignored */
         break;
      }
   }

   return result;
}

static VkResult
wsi_wl_surface_get_formats(VkIcdSurfaceBase *icd_surface,
                           struct wsi_device *wsi_device,
                           uint32_t* pSurfaceFormatCount,
                           VkSurfaceFormatKHR* pSurfaceFormats)
{
   VkIcdSurfaceWayland *surface = (VkIcdSurfaceWayland *)icd_surface;
   struct wsi_wayland *wsi =
      (struct wsi_wayland *)wsi_device->wsi[VK_ICD_WSI_PLATFORM_WAYLAND];

   struct wsi_wl_display display;
   if (wsi_wl_display_init(wsi, &display, surface->display, true,
                           wsi_device->sw))
      return VK_ERROR_SURFACE_LOST_KHR;

   VK_OUTARRAY_MAKE(out, pSurfaceFormats, pSurfaceFormatCount);

   struct wsi_wl_format *disp_fmt;
   u_vector_foreach(disp_fmt, &display.formats) {
      /* Skip formats for which we can't support both alpha & opaque
       * formats.
       */
      if (!disp_fmt->has_opaque_format ||
          !disp_fmt->has_alpha_format)
         continue;

      vk_outarray_append(&out, out_fmt) {
         out_fmt->format = disp_fmt->vk_format;
         out_fmt->colorSpace = VK_COLORSPACE_SRGB_NONLINEAR_KHR;
      }
   }

   wsi_wl_display_finish(&display);

   return vk_outarray_status(&out);
}

static VkResult
wsi_wl_surface_get_formats2(VkIcdSurfaceBase *icd_surface,
                            struct wsi_device *wsi_device,
                            const void *info_next,
                            uint32_t* pSurfaceFormatCount,
                            VkSurfaceFormat2KHR* pSurfaceFormats)
{
   VkIcdSurfaceWayland *surface = (VkIcdSurfaceWayland *)icd_surface;
   struct wsi_wayland *wsi =
      (struct wsi_wayland *)wsi_device->wsi[VK_ICD_WSI_PLATFORM_WAYLAND];

   struct wsi_wl_display display;
   if (wsi_wl_display_init(wsi, &display, surface->display, true,
                           wsi_device->sw))
      return VK_ERROR_SURFACE_LOST_KHR;

   VK_OUTARRAY_MAKE(out, pSurfaceFormats, pSurfaceFormatCount);

   struct wsi_wl_format *disp_fmt;
   u_vector_foreach(disp_fmt, &display.formats) {
      /* Skip formats for which we can't support both alpha & opaque
       * formats.
       */
      if (!disp_fmt->has_opaque_format ||
          !disp_fmt->has_alpha_format)
         continue;

      vk_outarray_append(&out, out_fmt) {
         out_fmt->surfaceFormat.format = disp_fmt->vk_format;
         out_fmt->surfaceFormat.colorSpace = VK_COLORSPACE_SRGB_NONLINEAR_KHR;
      }
   }

   wsi_wl_display_finish(&display);

   return vk_outarray_status(&out);
}

static VkResult
wsi_wl_surface_get_present_modes(VkIcdSurfaceBase *surface,
                                 uint32_t* pPresentModeCount,
                                 VkPresentModeKHR* pPresentModes)
{
   if (pPresentModes == NULL) {
      *pPresentModeCount = ARRAY_SIZE(present_modes);
      return VK_SUCCESS;
   }

   *pPresentModeCount = MIN2(*pPresentModeCount, ARRAY_SIZE(present_modes));
   typed_memcpy(pPresentModes, present_modes, *pPresentModeCount);

   if (*pPresentModeCount < ARRAY_SIZE(present_modes))
      return VK_INCOMPLETE;
   else
      return VK_SUCCESS;
}

static VkResult
wsi_wl_surface_get_present_rectangles(VkIcdSurfaceBase *surface,
                                      struct wsi_device *wsi_device,
                                      uint32_t* pRectCount,
                                      VkRect2D* pRects)
{
   VK_OUTARRAY_MAKE(out, pRects, pRectCount);

   vk_outarray_append(&out, rect) {
      /* We don't know a size so just return the usual "I don't know." */
      *rect = (VkRect2D) {
         .offset = { 0, 0 },
         .extent = { UINT32_MAX, UINT32_MAX },
      };
   }

   return vk_outarray_status(&out);
}

VKAPI_ATTR VkResult VKAPI_CALL
wsi_CreateWaylandSurfaceKHR(VkInstance _instance,
                            const VkWaylandSurfaceCreateInfoKHR *pCreateInfo,
                            const VkAllocationCallbacks *pAllocator,
                            VkSurfaceKHR *pSurface)
{
   VK_FROM_HANDLE(vk_instance, instance, _instance);
   VkIcdSurfaceWayland *surface;

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR);

   surface = vk_alloc2(&instance->alloc, pAllocator, sizeof *surface, 8,
                       VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (surface == NULL)
      return VK_ERROR_OUT_OF_HOST_MEMORY;

   surface->base.platform = VK_ICD_WSI_PLATFORM_WAYLAND;
   surface->display = pCreateInfo->display;
   surface->surface = pCreateInfo->surface;

   *pSurface = VkIcdSurfaceBase_to_handle(&surface->base);

   return VK_SUCCESS;
}

struct wsi_wl_image {
   struct wsi_image base;
   struct wl_buffer *buffer;
   bool busy;
   void *data_ptr;
   uint32_t data_size;
};

struct wsi_wl_swapchain {
   struct wsi_swapchain base;

   struct wsi_wl_display *display;

   struct wl_surface *surface;

   struct wl_callback *frame;

   VkExtent2D extent;
   VkFormat vk_format;
   uint32_t drm_format;
   uint32_t shm_format;

   uint32_t num_drm_modifiers;
   const uint64_t *drm_modifiers;

   VkPresentModeKHR present_mode;
   bool fifo_ready;

   struct wsi_wl_image images[0];
};
VK_DEFINE_NONDISP_HANDLE_CASTS(wsi_wl_swapchain, base.base, VkSwapchainKHR,
                               VK_OBJECT_TYPE_SWAPCHAIN_KHR)

static struct wsi_image *
wsi_wl_swapchain_get_wsi_image(struct wsi_swapchain *wsi_chain,
                               uint32_t image_index)
{
   struct wsi_wl_swapchain *chain = (struct wsi_wl_swapchain *)wsi_chain;
   return &chain->images[image_index].base;
}

static VkResult
wsi_wl_swapchain_acquire_next_image(struct wsi_swapchain *wsi_chain,
                                    const VkAcquireNextImageInfoKHR *info,
                                    uint32_t *image_index)
{
   struct wsi_wl_swapchain *chain = (struct wsi_wl_swapchain *)wsi_chain;
   struct timespec start_time, end_time;
   struct timespec rel_timeout;
   int wl_fd = wl_display_get_fd(chain->display->wl_display);

   timespec_from_nsec(&rel_timeout, info->timeout);

   clock_gettime(CLOCK_MONOTONIC, &start_time);
   timespec_add(&end_time, &rel_timeout, &start_time);

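   /* Event loop: dispatch anything already queued, then scan for an image
    * whose wl_buffer the compositor has released. If none is free yet, use
    * the prepare_read/poll/read_events protocol on the display fd (bounded
    * by the caller's timeout) so we block without racing other threads that
    * are also reading Wayland events.
    */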
   while (1) {
      /* Try to dispatch potential events. */
      int ret = wl_display_dispatch_queue_pending(chain->display->wl_display,
                                                  chain->display->queue);
      if (ret < 0)
         return VK_ERROR_OUT_OF_DATE_KHR;

      /* Try to find a free image. */
      for (uint32_t i = 0; i < chain->base.image_count; i++) {
         if (!chain->images[i].busy) {
            /* We found a non-busy image */
            *image_index = i;
            chain->images[i].busy = true;
            return VK_SUCCESS;
         }
      }

      /* Check for timeout. */
      struct timespec current_time;
      clock_gettime(CLOCK_MONOTONIC, &current_time);
      if (timespec_after(&current_time, &end_time))
         return VK_NOT_READY;

      /* Try to read events from the server. */
      ret = wl_display_prepare_read_queue(chain->display->wl_display,
                                          chain->display->queue);
      if (ret < 0) {
         /* Another thread might have read events for our queue already. Go
          * back to dispatch them.
          */
         if (errno == EAGAIN)
            continue;
         return VK_ERROR_OUT_OF_DATE_KHR;
      }

      struct pollfd pollfd = {
         .fd = wl_fd,
         .events = POLLIN
      };
      timespec_sub(&rel_timeout, &end_time, &current_time);
      ret = ppoll(&pollfd, 1, &rel_timeout, NULL);
      if (ret <= 0) {
         int lerrno = errno;
         wl_display_cancel_read(chain->display->wl_display);
         if (ret < 0) {
            /* If ppoll() was interrupted, try again. */
            if (lerrno == EINTR || lerrno == EAGAIN)
               continue;
            return VK_ERROR_OUT_OF_DATE_KHR;
         }
         assert(ret == 0);
         continue;
      }

      ret = wl_display_read_events(chain->display->wl_display);
      if (ret < 0)
         return VK_ERROR_OUT_OF_DATE_KHR;
   }
}

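/* wl_surface.frame callback: the compositor has finished with the previous
 * frame, so a FIFO present may submit the next one.
 */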
static void
frame_handle_done(void *data, struct wl_callback *callback, uint32_t serial)
{
   struct wsi_wl_swapchain *chain = data;

   chain->frame = NULL;
   chain->fifo_ready = true;

   wl_callback_destroy(callback);
}

static const struct wl_callback_listener frame_listener = {
   frame_handle_done,
};

static VkResult
wsi_wl_swapchain_queue_present(struct wsi_swapchain *wsi_chain,
                               uint32_t image_index,
                               const VkPresentRegionKHR *damage)
{
   struct wsi_wl_swapchain *chain = (struct wsi_wl_swapchain *)wsi_chain;

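   /* In software mode the image was rendered into ordinary host-visible
    * memory, so copy it row by row into the mmap'ed wl_shm buffer before
    * attaching that buffer to the surface.
    */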
   if (chain->display->sw) {
      struct wsi_wl_image *image = &chain->images[image_index];
      void *dptr = image->data_ptr;
      void *sptr;
      chain->base.wsi->MapMemory(chain->base.device,
                                 image->base.memory,
                                 0, 0, 0, &sptr);

      for (unsigned r = 0; r < chain->extent.height; r++) {
         memcpy(dptr, sptr, image->base.row_pitches[0]);
         dptr += image->base.row_pitches[0];
         sptr += image->base.row_pitches[0];
      }
      chain->base.wsi->UnmapMemory(chain->base.device,
                                   image->base.memory);

   }
   if (chain->base.present_mode == VK_PRESENT_MODE_FIFO_KHR) {
      while (!chain->fifo_ready) {
         int ret = wl_display_dispatch_queue(chain->display->wl_display,
                                             chain->display->queue);
         if (ret < 0)
            return VK_ERROR_OUT_OF_DATE_KHR;
      }
   }

   assert(image_index < chain->base.image_count);
   wl_surface_attach(chain->surface, chain->images[image_index].buffer, 0, 0);

   if (wl_surface_get_version(chain->surface) >= 4 && damage &&
       damage->pRectangles && damage->rectangleCount > 0) {
      for (unsigned i = 0; i < damage->rectangleCount; i++) {
         const VkRectLayerKHR *rect = &damage->pRectangles[i];
         assert(rect->layer == 0);
         wl_surface_damage_buffer(chain->surface,
                                  rect->offset.x, rect->offset.y,
                                  rect->extent.width, rect->extent.height);
      }
   } else {
      wl_surface_damage(chain->surface, 0, 0, INT32_MAX, INT32_MAX);
   }

   if (chain->base.present_mode == VK_PRESENT_MODE_FIFO_KHR) {
      chain->frame = wl_surface_frame(chain->surface);
      wl_callback_add_listener(chain->frame, &frame_listener, chain);
      chain->fifo_ready = false;
   }

   chain->images[image_index].busy = true;
   wl_surface_commit(chain->surface);
   wl_display_flush(chain->display->wl_display);

   return VK_SUCCESS;
}

static void
buffer_handle_release(void *data, struct wl_buffer *buffer)
{
   struct wsi_wl_image *image = data;

   assert(image->buffer == buffer);

   image->busy = false;
}

static const struct wl_buffer_listener buffer_listener = {
   buffer_handle_release,
};

static VkResult
wsi_wl_image_init(struct wsi_wl_swapchain *chain,
                  struct wsi_wl_image *image,
                  const VkSwapchainCreateInfoKHR *pCreateInfo,
                  const VkAllocationCallbacks* pAllocator)
{
   struct wsi_wl_display *display = chain->display;
   VkResult result;

   memset(image, 0, sizeof(*image));

   result = wsi_create_native_image(&chain->base, pCreateInfo,
                                    chain->num_drm_modifiers > 0 ? 1 : 0,
                                    &chain->num_drm_modifiers,
                                    &chain->drm_modifiers, NULL, &image->base);

   if (result != VK_SUCCESS)
      return result;

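   /* Wrap the image in a wl_buffer: in software mode via a wl_shm_pool backed
    * by an anonymous file that we also keep mapped for the copy in
    * queue_present(); otherwise via zwp_linux_buffer_params_v1 using the
    * per-plane dma-buf fds, offsets, pitches and the DRM format modifier.
    */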
   if (display->sw) {
      int fd, stride;

      stride = image->base.row_pitches[0];
      image->data_size = stride * chain->extent.height;

      /* Create a shareable buffer */
      fd = os_create_anonymous_file(image->data_size, NULL);
      if (fd < 0)
         goto fail_image;

      image->data_ptr = mmap(NULL, image->data_size, PROT_READ | PROT_WRITE, MAP_SHARED, fd, 0);
      if (image->data_ptr == MAP_FAILED) {
         close(fd);
         goto fail_image;
      }
      /* Share it in a wl_buffer */
      struct wl_shm_pool *pool = wl_shm_create_pool(display->wl_shm, fd, image->data_size);
      wl_proxy_set_queue((struct wl_proxy *)pool, display->queue);
      image->buffer = wl_shm_pool_create_buffer(pool, 0, chain->extent.width,
                                                chain->extent.height, stride,
                                                chain->shm_format);
      wl_shm_pool_destroy(pool);
      close(fd);
   } else {
      assert(display->wl_dmabuf);

      struct zwp_linux_buffer_params_v1 *params =
         zwp_linux_dmabuf_v1_create_params(display->wl_dmabuf);
      if (!params)
         goto fail_image;

      for (int i = 0; i < image->base.num_planes; i++) {
         zwp_linux_buffer_params_v1_add(params,
                                        image->base.fds[i],
                                        i,
                                        image->base.offsets[i],
                                        image->base.row_pitches[i],
                                        image->base.drm_modifier >> 32,
                                        image->base.drm_modifier & 0xffffffff);
         close(image->base.fds[i]);
      }

      image->buffer =
         zwp_linux_buffer_params_v1_create_immed(params,
                                                 chain->extent.width,
                                                 chain->extent.height,
                                                 chain->drm_format,
                                                 0);
      zwp_linux_buffer_params_v1_destroy(params);
   }

   if (!image->buffer)
      goto fail_image;

   wl_buffer_add_listener(image->buffer, &buffer_listener, image);

   return VK_SUCCESS;

fail_image:
   wsi_destroy_image(&chain->base, &image->base);

   return VK_ERROR_OUT_OF_HOST_MEMORY;
}

static VkResult
wsi_wl_swapchain_destroy(struct wsi_swapchain *wsi_chain,
                         const VkAllocationCallbacks *pAllocator)
{
   struct wsi_wl_swapchain *chain = (struct wsi_wl_swapchain *)wsi_chain;

   for (uint32_t i = 0; i < chain->base.image_count; i++) {
      if (chain->images[i].buffer) {
         wl_buffer_destroy(chain->images[i].buffer);
         wsi_destroy_image(&chain->base, &chain->images[i].base);
         if (chain->images[i].data_ptr)
            munmap(chain->images[i].data_ptr, chain->images[i].data_size);
      }
   }

   if (chain->frame)
      wl_callback_destroy(chain->frame);
   if (chain->surface)
      wl_proxy_wrapper_destroy(chain->surface);

   if (chain->display)
      wsi_wl_display_unref(chain->display);

   wsi_swapchain_finish(&chain->base);

   vk_free(pAllocator, chain);

   return VK_SUCCESS;
}

static VkResult
wsi_wl_surface_create_swapchain(VkIcdSurfaceBase *icd_surface,
                                VkDevice device,
                                struct wsi_device *wsi_device,
                                const VkSwapchainCreateInfoKHR* pCreateInfo,
                                const VkAllocationCallbacks* pAllocator,
                                struct wsi_swapchain **swapchain_out)
{
   VkIcdSurfaceWayland *surface = (VkIcdSurfaceWayland *)icd_surface;
   struct wsi_wayland *wsi =
      (struct wsi_wayland *)wsi_device->wsi[VK_ICD_WSI_PLATFORM_WAYLAND];
   struct wsi_wl_swapchain *chain;
   VkResult result;

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR);

   int num_images = pCreateInfo->minImageCount;

   size_t size = sizeof(*chain) + num_images * sizeof(chain->images[0]);
   chain = vk_zalloc(pAllocator, size, 8, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (chain == NULL)
      return VK_ERROR_OUT_OF_HOST_MEMORY;

   result = wsi_swapchain_init(wsi_device, &chain->base, device,
                               pCreateInfo, pAllocator);
   if (result != VK_SUCCESS) {
      vk_free(pAllocator, chain);
      return result;
   }

   bool alpha = pCreateInfo->compositeAlpha ==
                VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR;

   chain->base.destroy = wsi_wl_swapchain_destroy;
   chain->base.get_wsi_image = wsi_wl_swapchain_get_wsi_image;
   chain->base.acquire_next_image = wsi_wl_swapchain_acquire_next_image;
   chain->base.queue_present = wsi_wl_swapchain_queue_present;
   chain->base.present_mode = wsi_swapchain_get_present_mode(wsi_device, pCreateInfo);
   chain->base.image_count = num_images;
   chain->extent = pCreateInfo->imageExtent;
   chain->vk_format = pCreateInfo->imageFormat;
   if (wsi_device->sw)
      chain->shm_format = wl_shm_format_for_vk_format(chain->vk_format, alpha);
   else
      chain->drm_format = wl_drm_format_for_vk_format(chain->vk_format, alpha);

   if (pCreateInfo->oldSwapchain) {
      /* If we have an oldSwapchain parameter, copy the display struct over
       * from the old one so we don't have to fully re-initialize it.
       */
      VK_FROM_HANDLE(wsi_wl_swapchain, old_chain, pCreateInfo->oldSwapchain);
      chain->display = wsi_wl_display_ref(old_chain->display);
   } else {
      chain->display = NULL;
      result = wsi_wl_display_create(wsi, surface->display,
                                     wsi_device->sw, &chain->display);
      if (result != VK_SUCCESS)
         goto fail;
   }

   chain->surface = wl_proxy_create_wrapper(surface->surface);
   if (!chain->surface) {
      result = VK_ERROR_OUT_OF_HOST_MEMORY;
      goto fail;
   }
   wl_proxy_set_queue((struct wl_proxy *) chain->surface,
                      chain->display->queue);

   chain->num_drm_modifiers = 0;
   chain->drm_modifiers = 0;

   /* Use explicit DRM format modifiers when both the server and the driver
    * support them.
    */
   if (chain->display->wl_dmabuf && chain->base.wsi->supports_modifiers) {
      struct wsi_wl_format *f = find_format(&chain->display->formats, chain->vk_format);
      if (f) {
         chain->drm_modifiers = u_vector_tail(&f->modifiers);
         chain->num_drm_modifiers = u_vector_length(&f->modifiers);
      }
   }

   chain->fifo_ready = true;

   for (uint32_t i = 0; i < chain->base.image_count; i++) {
      result = wsi_wl_image_init(chain, &chain->images[i],
                                 pCreateInfo, pAllocator);
      if (result != VK_SUCCESS)
         goto fail;
      chain->images[i].busy = false;
   }

   *swapchain_out = &chain->base;

   return VK_SUCCESS;

fail:
   wsi_wl_swapchain_destroy(&chain->base, pAllocator);

   return result;
}

VkResult
wsi_wl_init_wsi(struct wsi_device *wsi_device,
                const VkAllocationCallbacks *alloc,
                VkPhysicalDevice physical_device)
{
   struct wsi_wayland *wsi;
   VkResult result;

   wsi = vk_alloc(alloc, sizeof(*wsi), 8,
                  VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
   if (!wsi) {
      result = VK_ERROR_OUT_OF_HOST_MEMORY;
      goto fail;
   }

   wsi->physical_device = physical_device;
   wsi->alloc = alloc;
   wsi->wsi = wsi_device;

   wsi->base.get_support = wsi_wl_surface_get_support;
   wsi->base.get_capabilities2 = wsi_wl_surface_get_capabilities2;
   wsi->base.get_formats = wsi_wl_surface_get_formats;
   wsi->base.get_formats2 = wsi_wl_surface_get_formats2;
   wsi->base.get_present_modes = wsi_wl_surface_get_present_modes;
   wsi->base.get_present_rectangles = wsi_wl_surface_get_present_rectangles;
   wsi->base.create_swapchain = wsi_wl_surface_create_swapchain;

   wsi_device->wsi[VK_ICD_WSI_PLATFORM_WAYLAND] = &wsi->base;

   return VK_SUCCESS;

fail:
   wsi_device->wsi[VK_ICD_WSI_PLATFORM_WAYLAND] = NULL;

   return result;
}

void
wsi_wl_finish_wsi(struct wsi_device *wsi_device,
                  const VkAllocationCallbacks *alloc)
{
   struct wsi_wayland *wsi =
      (struct wsi_wayland *)wsi_device->wsi[VK_ICD_WSI_PLATFORM_WAYLAND];
   if (!wsi)
      return;

   vk_free(alloc, wsi);
}