1 /*
2 * Copyright © 2015 Intel Corporation
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice (including the next
12 * paragraph) shall be included in all copies or substantial portions of the
13 * Software.
14 *
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
18 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
21 * IN THE SOFTWARE.
22 */
23
24 #include <wayland-client.h>
25
26 #include <assert.h>
27 #include <stdlib.h>
28 #include <stdio.h>
29 #include <unistd.h>
30 #include <errno.h>
31 #include <string.h>
32 #include <pthread.h>
33 #include <poll.h>
34 #include <sys/mman.h>
35
36 #include "drm-uapi/drm_fourcc.h"
37
38 #include "vk_instance.h"
39 #include "vk_physical_device.h"
40 #include "vk_util.h"
41 #include "wsi_common_entrypoints.h"
42 #include "wsi_common_private.h"
43 #include "linux-dmabuf-unstable-v1-client-protocol.h"
44
45 #include <util/compiler.h>
46 #include <util/hash_table.h>
47 #include <util/timespec.h>
48 #include <util/u_vector.h>
49 #include <util/anon_file.h>
50
51 struct wsi_wayland;
52
/* One Vulkan format advertised by the compositor, with the set of DRM
 * format modifiers collected for it and flags recording whether it was
 * exposed with an alpha channel, as opaque, or both. */
struct wsi_wl_format {
   VkFormat vk_format;
   uint32_t flags;              /* bitmask of enum wsi_wl_fmt_flag */
   struct u_vector modifiers;   /* elements are uint64_t DRM modifiers */
};
58
/* Per-wl_display WSI state: a private event queue, the bound compositor
 * globals, and the format list built from their events. */
struct wsi_wl_display {
   /* The real wl_display */
   struct wl_display *wl_display;
   /* Actually a proxy wrapper around the event queue */
   struct wl_display *wl_display_wrapper;
   /* Private queue so WSI events are not mixed with the application's */
   struct wl_event_queue *queue;

   /* Compositor globals: wl_shm is bound in software mode, wl_dmabuf
    * otherwise (see registry_handle_global). */
   struct wl_shm *wl_shm;
   struct zwp_linux_dmabuf_v1 *wl_dmabuf;

   struct wsi_wayland *wsi_wl;

   /* Formats populated by zwp_linux_dmabuf_v1 or wl_shm interfaces */
   struct u_vector formats;     /* elements are struct wsi_wl_format */

   /* Only used for displays created by wsi_wl_display_create */
   uint32_t refcount;

   /* True when presenting through wl_shm (software) rather than dma-buf */
   bool sw;
};
79
/* The Wayland WSI backend object registered for a physical device. */
struct wsi_wayland {
   struct wsi_interface base;

   struct wsi_device *wsi;

   const VkAllocationCallbacks *alloc;
   VkPhysicalDevice physical_device;
};
88
/* How a format was advertised by the compositor: as an alpha format, an
 * opaque (X-channel) format, or both. */
enum wsi_wl_fmt_flag {
   WSI_WL_FMT_ALPHA = 1 << 0,
   WSI_WL_FMT_OPAQUE = 1 << 1,
};
93
94 static struct wsi_wl_format *
find_format(struct u_vector * formats,VkFormat format)95 find_format(struct u_vector *formats, VkFormat format)
96 {
97 struct wsi_wl_format *f;
98
99 u_vector_foreach(f, formats)
100 if (f->vk_format == format)
101 return f;
102
103 return NULL;
104 }
105
/* Add @format to @formats with @flags, OR-ing @flags into an existing
 * entry when the format is already listed.  Formats that are not
 * color-renderable with optimal tiling are rejected.  Returns the list
 * entry, or NULL on rejection or allocation failure. */
static struct wsi_wl_format *
wsi_wl_display_add_vk_format(struct wsi_wl_display *display,
                             struct u_vector *formats,
                             VkFormat format, uint32_t flags)
{
   assert(flags & (WSI_WL_FMT_ALPHA | WSI_WL_FMT_OPAQUE));

   /* Don't add a format that's already in the list */
   struct wsi_wl_format *f = find_format(formats, format);
   if (f) {
      f->flags |= flags;
      return f;
   }

   /* Don't add formats that aren't renderable. */
   VkFormatProperties props;

   display->wsi_wl->wsi->GetPhysicalDeviceFormatProperties(display->wsi_wl->physical_device,
                                                           format, &props);
   if (!(props.optimalTilingFeatures & VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT))
      return NULL;

   /* Initialize the modifier vector before appending the entry so a
    * half-constructed format is never visible in @formats. */
   struct u_vector modifiers;
   if (!u_vector_init_pow2(&modifiers, 4, sizeof(uint64_t)))
      return NULL;

   f = u_vector_add(formats);
   if (!f) {
      u_vector_finish(&modifiers);
      return NULL;
   }

   f->vk_format = format;
   f->flags = flags;
   f->modifiers = modifiers;

   return f;
}
144
145 static void
wsi_wl_format_add_modifier(struct wsi_wl_format * format,uint64_t modifier)146 wsi_wl_format_add_modifier(struct wsi_wl_format *format, uint64_t modifier)
147 {
148 uint64_t *mod;
149
150 if (modifier == DRM_FORMAT_MOD_INVALID)
151 return;
152
153 u_vector_foreach(mod, &format->modifiers)
154 if (*mod == modifier)
155 return;
156
157 mod = u_vector_add(&format->modifiers);
158 if (mod)
159 *mod = modifier;
160 }
161
162 static void
wsi_wl_display_add_vk_format_modifier(struct wsi_wl_display * display,struct u_vector * formats,VkFormat vk_format,uint32_t flags,uint64_t modifier)163 wsi_wl_display_add_vk_format_modifier(struct wsi_wl_display *display,
164 struct u_vector *formats,
165 VkFormat vk_format, uint32_t flags,
166 uint64_t modifier)
167 {
168 struct wsi_wl_format *format;
169
170 format = wsi_wl_display_add_vk_format(display, formats, vk_format, flags);
171 if (format)
172 wsi_wl_format_add_modifier(format, modifier);
173 }
174
/* Translate an advertised DRM fourcc + modifier into the Vulkan format(s)
 * it can back and record each in @formats.  DRM formats with no Vulkan
 * analog fall out of the switch and are silently ignored. */
static void
wsi_wl_display_add_drm_format_modifier(struct wsi_wl_display *display,
                                       struct u_vector *formats,
                                       uint32_t drm_format, uint64_t modifier)
{
   switch (drm_format) {
#if 0
   /* TODO: These are only available when VK_EXT_4444_formats is enabled, so
    * we probably need to make their use conditional on this extension. */
   case DRM_FORMAT_ARGB4444:
      wsi_wl_display_add_vk_format_modifier(display, formats,
                                            VK_FORMAT_A4R4G4B4_UNORM_PACK16,
                                            WSI_WL_FMT_ALPHA, modifier);
      break;
   case DRM_FORMAT_XRGB4444:
      wsi_wl_display_add_vk_format_modifier(display, formats,
                                            VK_FORMAT_A4R4G4B4_UNORM_PACK16,
                                            WSI_WL_FMT_OPAQUE, modifier);
      break;
   case DRM_FORMAT_ABGR4444:
      wsi_wl_display_add_vk_format_modifier(display, formats,
                                            VK_FORMAT_A4B4G4R4_UNORM_PACK16,
                                            WSI_WL_FMT_ALPHA, modifier);
      break;
   case DRM_FORMAT_XBGR4444:
      wsi_wl_display_add_vk_format_modifier(display, formats,
                                            VK_FORMAT_A4B4G4R4_UNORM_PACK16,
                                            WSI_WL_FMT_OPAQUE, modifier);
      break;
#endif

   /* Vulkan _PACKN formats have the same component order as DRM formats
    * on little endian systems, on big endian there exists no analog. */
#if MESA_LITTLE_ENDIAN
   case DRM_FORMAT_RGBA4444:
      wsi_wl_display_add_vk_format_modifier(display, formats,
                                            VK_FORMAT_R4G4B4A4_UNORM_PACK16,
                                            WSI_WL_FMT_ALPHA, modifier);
      break;
   case DRM_FORMAT_RGBX4444:
      wsi_wl_display_add_vk_format_modifier(display, formats,
                                            VK_FORMAT_R4G4B4A4_UNORM_PACK16,
                                            WSI_WL_FMT_OPAQUE, modifier);
      break;
   case DRM_FORMAT_BGRA4444:
      wsi_wl_display_add_vk_format_modifier(display, formats,
                                            VK_FORMAT_B4G4R4A4_UNORM_PACK16,
                                            WSI_WL_FMT_ALPHA, modifier);
      break;
   case DRM_FORMAT_BGRX4444:
      wsi_wl_display_add_vk_format_modifier(display, formats,
                                            VK_FORMAT_B4G4R4A4_UNORM_PACK16,
                                            WSI_WL_FMT_OPAQUE, modifier);
      break;
   case DRM_FORMAT_RGB565:
      wsi_wl_display_add_vk_format_modifier(display, formats,
                                            VK_FORMAT_R5G6B5_UNORM_PACK16,
                                            WSI_WL_FMT_ALPHA | WSI_WL_FMT_OPAQUE,
                                            modifier);
      break;
   case DRM_FORMAT_BGR565:
      wsi_wl_display_add_vk_format_modifier(display, formats,
                                            VK_FORMAT_B5G6R5_UNORM_PACK16,
                                            WSI_WL_FMT_ALPHA | WSI_WL_FMT_OPAQUE,
                                            modifier);
      break;
   case DRM_FORMAT_ARGB1555:
      wsi_wl_display_add_vk_format_modifier(display, formats,
                                            VK_FORMAT_A1R5G5B5_UNORM_PACK16,
                                            WSI_WL_FMT_ALPHA, modifier);
      break;
   case DRM_FORMAT_XRGB1555:
      wsi_wl_display_add_vk_format_modifier(display, formats,
                                            VK_FORMAT_A1R5G5B5_UNORM_PACK16,
                                            WSI_WL_FMT_OPAQUE, modifier);
      break;
   case DRM_FORMAT_RGBA5551:
      wsi_wl_display_add_vk_format_modifier(display, formats,
                                            VK_FORMAT_R5G5B5A1_UNORM_PACK16,
                                            WSI_WL_FMT_ALPHA, modifier);
      break;
   case DRM_FORMAT_RGBX5551:
      wsi_wl_display_add_vk_format_modifier(display, formats,
                                            VK_FORMAT_R5G5B5A1_UNORM_PACK16,
                                            WSI_WL_FMT_OPAQUE, modifier);
      break;
   case DRM_FORMAT_BGRA5551:
      wsi_wl_display_add_vk_format_modifier(display, formats,
                                            VK_FORMAT_B5G5R5A1_UNORM_PACK16,
                                            WSI_WL_FMT_ALPHA, modifier);
      break;
   case DRM_FORMAT_BGRX5551:
      wsi_wl_display_add_vk_format_modifier(display, formats,
                                            VK_FORMAT_B5G5R5A1_UNORM_PACK16,
                                            WSI_WL_FMT_OPAQUE, modifier);
      break;
   case DRM_FORMAT_ARGB2101010:
      wsi_wl_display_add_vk_format_modifier(display, formats,
                                            VK_FORMAT_A2R10G10B10_UNORM_PACK32,
                                            WSI_WL_FMT_ALPHA, modifier);
      break;
   case DRM_FORMAT_XRGB2101010:
      wsi_wl_display_add_vk_format_modifier(display, formats,
                                            VK_FORMAT_A2R10G10B10_UNORM_PACK32,
                                            WSI_WL_FMT_OPAQUE, modifier);
      break;
   case DRM_FORMAT_ABGR2101010:
      wsi_wl_display_add_vk_format_modifier(display, formats,
                                            VK_FORMAT_A2B10G10R10_UNORM_PACK32,
                                            WSI_WL_FMT_ALPHA, modifier);
      break;
   case DRM_FORMAT_XBGR2101010:
      wsi_wl_display_add_vk_format_modifier(display, formats,
                                            VK_FORMAT_A2B10G10R10_UNORM_PACK32,
                                            WSI_WL_FMT_OPAQUE, modifier);
      break;
#endif

   /* Non-packed 8-bit formats have an inverted channel order compared to the
    * little endian DRM formats, because the DRM channel ordering is high->low
    * but the vulkan channel ordering is in memory byte order
    *
    * For all UNORM formats which have a SRGB variant, we must support both if
    * we can. SRGB in this context means that rendering to it will result in a
    * linear -> nonlinear SRGB colorspace conversion before the data is stored.
    * The inverse function is applied when sampling from SRGB images.
    * From Wayland's perspective nothing changes, the difference is just how
    * Vulkan interprets the pixel data. */
   case DRM_FORMAT_XBGR8888:
      wsi_wl_display_add_vk_format_modifier(display, formats,
                                            VK_FORMAT_R8G8B8_SRGB,
                                            WSI_WL_FMT_ALPHA | WSI_WL_FMT_OPAQUE,
                                            modifier);
      wsi_wl_display_add_vk_format_modifier(display, formats,
                                            VK_FORMAT_R8G8B8_UNORM,
                                            WSI_WL_FMT_ALPHA | WSI_WL_FMT_OPAQUE,
                                            modifier);
      wsi_wl_display_add_vk_format_modifier(display, formats,
                                            VK_FORMAT_R8G8B8A8_SRGB,
                                            WSI_WL_FMT_OPAQUE, modifier);
      wsi_wl_display_add_vk_format_modifier(display, formats,
                                            VK_FORMAT_R8G8B8A8_UNORM,
                                            WSI_WL_FMT_OPAQUE, modifier);
      break;
   case DRM_FORMAT_ABGR8888:
      wsi_wl_display_add_vk_format_modifier(display, formats,
                                            VK_FORMAT_R8G8B8A8_SRGB,
                                            WSI_WL_FMT_ALPHA, modifier);
      wsi_wl_display_add_vk_format_modifier(display, formats,
                                            VK_FORMAT_R8G8B8A8_UNORM,
                                            WSI_WL_FMT_ALPHA, modifier);
      break;
   case DRM_FORMAT_XRGB8888:
      wsi_wl_display_add_vk_format_modifier(display, formats,
                                            VK_FORMAT_B8G8R8_SRGB,
                                            WSI_WL_FMT_ALPHA | WSI_WL_FMT_OPAQUE,
                                            modifier);
      wsi_wl_display_add_vk_format_modifier(display, formats,
                                            VK_FORMAT_B8G8R8_UNORM,
                                            WSI_WL_FMT_ALPHA | WSI_WL_FMT_OPAQUE,
                                            modifier);
      wsi_wl_display_add_vk_format_modifier(display, formats,
                                            VK_FORMAT_B8G8R8A8_SRGB,
                                            WSI_WL_FMT_OPAQUE, modifier);
      wsi_wl_display_add_vk_format_modifier(display, formats,
                                            VK_FORMAT_B8G8R8A8_UNORM,
                                            WSI_WL_FMT_OPAQUE, modifier);
      break;
   case DRM_FORMAT_ARGB8888:
      wsi_wl_display_add_vk_format_modifier(display, formats,
                                            VK_FORMAT_B8G8R8A8_SRGB,
                                            WSI_WL_FMT_ALPHA, modifier);
      wsi_wl_display_add_vk_format_modifier(display, formats,
                                            VK_FORMAT_B8G8R8A8_UNORM,
                                            WSI_WL_FMT_ALPHA, modifier);
      break;
   }
}
353
354 static uint32_t
drm_format_for_wl_shm_format(enum wl_shm_format shm_format)355 drm_format_for_wl_shm_format(enum wl_shm_format shm_format)
356 {
357 /* wl_shm formats are identical to DRM, except ARGB8888 and XRGB8888 */
358 switch (shm_format) {
359 case WL_SHM_FORMAT_ARGB8888:
360 return DRM_FORMAT_ARGB8888;
361 case WL_SHM_FORMAT_XRGB8888:
362 return DRM_FORMAT_XRGB8888;
363 default:
364 return shm_format;
365 }
366 }
367
368 static void
wsi_wl_display_add_wl_shm_format(struct wsi_wl_display * display,struct u_vector * formats,enum wl_shm_format shm_format)369 wsi_wl_display_add_wl_shm_format(struct wsi_wl_display *display,
370 struct u_vector *formats,
371 enum wl_shm_format shm_format)
372 {
373 uint32_t drm_format = drm_format_for_wl_shm_format(shm_format);
374
375 wsi_wl_display_add_drm_format_modifier(display, formats, drm_format,
376 DRM_FORMAT_MOD_INVALID);
377 }
378
/* Map a swapchain Vulkan format to the DRM fourcc to present with.
 * @alpha selects between the alpha and the opaque (X-channel) variant
 * where both exist.  Unknown formats assert and return
 * DRM_FORMAT_INVALID. */
static uint32_t
wl_drm_format_for_vk_format(VkFormat vk_format, bool alpha)
{
   switch (vk_format) {
#if 0
   case VK_FORMAT_A4R4G4B4_UNORM_PACK16:
      return alpha ? DRM_FORMAT_ARGB4444 : DRM_FORMAT_XRGB4444;
   case VK_FORMAT_A4B4G4R4_UNORM_PACK16:
      return alpha ? DRM_FORMAT_ABGR4444 : DRM_FORMAT_XBGR4444;
#endif
   /* Packed formats only match DRM component order on little endian. */
#if MESA_LITTLE_ENDIAN
   case VK_FORMAT_R4G4B4A4_UNORM_PACK16:
      return alpha ? DRM_FORMAT_RGBA4444 : DRM_FORMAT_RGBX4444;
   case VK_FORMAT_B4G4R4A4_UNORM_PACK16:
      return alpha ? DRM_FORMAT_BGRA4444 : DRM_FORMAT_BGRX4444;
   case VK_FORMAT_R5G6B5_UNORM_PACK16:
      return DRM_FORMAT_RGB565;
   case VK_FORMAT_B5G6R5_UNORM_PACK16:
      return DRM_FORMAT_BGR565;
   case VK_FORMAT_A1R5G5B5_UNORM_PACK16:
      return alpha ? DRM_FORMAT_ARGB1555 : DRM_FORMAT_XRGB1555;
   case VK_FORMAT_R5G5B5A1_UNORM_PACK16:
      return alpha ? DRM_FORMAT_RGBA5551 : DRM_FORMAT_RGBX5551;
   case VK_FORMAT_B5G5R5A1_UNORM_PACK16:
      return alpha ? DRM_FORMAT_BGRA5551 : DRM_FORMAT_BGRX5551;
   case VK_FORMAT_A2R10G10B10_UNORM_PACK32:
      return alpha ? DRM_FORMAT_ARGB2101010 : DRM_FORMAT_XRGB2101010;
   case VK_FORMAT_A2B10G10R10_UNORM_PACK32:
      return alpha ? DRM_FORMAT_ABGR2101010 : DRM_FORMAT_XBGR2101010;
#endif
   /* 8-bit array formats: Vulkan names are in memory byte order, DRM
    * fourccs are high->low, hence the inverted channel order below. */
   case VK_FORMAT_R8G8B8_UNORM:
   case VK_FORMAT_R8G8B8_SRGB:
      return DRM_FORMAT_XBGR8888;
   case VK_FORMAT_R8G8B8A8_UNORM:
   case VK_FORMAT_R8G8B8A8_SRGB:
      return alpha ? DRM_FORMAT_ABGR8888 : DRM_FORMAT_XBGR8888;
   case VK_FORMAT_B8G8R8_UNORM:
   case VK_FORMAT_B8G8R8_SRGB:
      return DRM_FORMAT_BGRX8888;
   case VK_FORMAT_B8G8R8A8_UNORM:
   case VK_FORMAT_B8G8R8A8_SRGB:
      return alpha ? DRM_FORMAT_ARGB8888 : DRM_FORMAT_XRGB8888;

   default:
      assert(!"Unsupported Vulkan format");
      return DRM_FORMAT_INVALID;
   }
}
427
428 static enum wl_shm_format
wl_shm_format_for_vk_format(VkFormat vk_format,bool alpha)429 wl_shm_format_for_vk_format(VkFormat vk_format, bool alpha)
430 {
431 uint32_t drm_format = wl_drm_format_for_vk_format(vk_format, alpha);
432 if (drm_format == DRM_FORMAT_INVALID) {
433 return 0;
434 }
435
436 /* wl_shm formats are identical to DRM, except ARGB8888 and XRGB8888 */
437 switch (drm_format) {
438 case DRM_FORMAT_ARGB8888:
439 return WL_SHM_FORMAT_ARGB8888;
440 case DRM_FORMAT_XRGB8888:
441 return WL_SHM_FORMAT_XRGB8888;
442 default:
443 return drm_format;
444 }
445 }
446
/* zwp_linux_dmabuf_v1.format event handler. */
static void
dmabuf_handle_format(void *data, struct zwp_linux_dmabuf_v1 *dmabuf,
                     uint32_t format)
{
   /* Formats are implicitly advertised by the modifier event, so we ignore
    * them here. */
}
454
455 static void
dmabuf_handle_modifier(void * data,struct zwp_linux_dmabuf_v1 * dmabuf,uint32_t format,uint32_t modifier_hi,uint32_t modifier_lo)456 dmabuf_handle_modifier(void *data, struct zwp_linux_dmabuf_v1 *dmabuf,
457 uint32_t format, uint32_t modifier_hi,
458 uint32_t modifier_lo)
459 {
460 struct wsi_wl_display *display = data;
461 uint64_t modifier;
462
463 modifier = ((uint64_t) modifier_hi << 32) | modifier_lo;
464 wsi_wl_display_add_drm_format_modifier(display, &display->formats,
465 format, modifier);
466 }
467
468 static const struct zwp_linux_dmabuf_v1_listener dmabuf_listener = {
469 dmabuf_handle_format,
470 dmabuf_handle_modifier,
471 };
472
/* wl_shm.format event handler: record the advertised format on the
 * display's format list. */
static void
shm_handle_format(void *data, struct wl_shm *shm, uint32_t format)
{
   struct wsi_wl_display *display = data;

   wsi_wl_display_add_wl_shm_format(display, &display->formats, format);
}
480
/* Listener for the wl_shm global (software presentation path). */
static const struct wl_shm_listener shm_listener = {
   .format = shm_handle_format
};
484
/* wl_registry.global handler: in software mode bind wl_shm (version 1);
 * otherwise bind zwp_linux_dmabuf_v1, requiring version 3+ for the
 * modifier event.  The bound object's events go to the display's private
 * queue via the wrapped display proxy. */
static void
registry_handle_global(void *data, struct wl_registry *registry,
                       uint32_t name, const char *interface, uint32_t version)
{
   struct wsi_wl_display *display = data;

   if (display->sw) {
      if (strcmp(interface, "wl_shm") == 0) {
         display->wl_shm = wl_registry_bind(registry, name, &wl_shm_interface, 1);
         wl_shm_add_listener(display->wl_shm, &shm_listener, display);
      }
      return;
   }

   if (strcmp(interface, "zwp_linux_dmabuf_v1") == 0 && version >= 3) {
      display->wl_dmabuf =
         wl_registry_bind(registry, name, &zwp_linux_dmabuf_v1_interface, 3);
      zwp_linux_dmabuf_v1_add_listener(display->wl_dmabuf,
                                       &dmabuf_listener, display);
   }
}
506
/* wl_registry.global_remove handler: intentionally does nothing. */
static void
registry_handle_global_remove(void *data, struct wl_registry *registry,
                              uint32_t name)
{ /* No-op */ }
511
512 static const struct wl_registry_listener registry_listener = {
513 registry_handle_global,
514 registry_handle_global_remove
515 };
516
/* Tear down everything owned by @display.  Safe to call on a partially
 * initialized display: every member is NULL-checked before destruction.
 * Must only be called once the refcount has dropped to zero. */
static void
wsi_wl_display_finish(struct wsi_wl_display *display)
{
   assert(display->refcount == 0);

   /* Each format owns its own modifier vector; free those first. */
   struct wsi_wl_format *f;
   u_vector_foreach(f, &display->formats)
      u_vector_finish(&f->modifiers);
   u_vector_finish(&display->formats);
   if (display->wl_shm)
      wl_shm_destroy(display->wl_shm);
   if (display->wl_dmabuf)
      zwp_linux_dmabuf_v1_destroy(display->wl_dmabuf);
   if (display->wl_display_wrapper)
      wl_proxy_wrapper_destroy(display->wl_display_wrapper);
   if (display->queue)
      wl_event_queue_destroy(display->queue);
}
535
536 static VkResult
wsi_wl_display_init(struct wsi_wayland * wsi_wl,struct wsi_wl_display * display,struct wl_display * wl_display,bool get_format_list,bool sw)537 wsi_wl_display_init(struct wsi_wayland *wsi_wl,
538 struct wsi_wl_display *display,
539 struct wl_display *wl_display,
540 bool get_format_list, bool sw)
541 {
542 VkResult result = VK_SUCCESS;
543 memset(display, 0, sizeof(*display));
544
545 if (!u_vector_init(&display->formats, 8, sizeof(struct wsi_wl_format)))
546 return VK_ERROR_OUT_OF_HOST_MEMORY;
547
548 display->wsi_wl = wsi_wl;
549 display->wl_display = wl_display;
550 display->sw = sw;
551
552 display->queue = wl_display_create_queue(wl_display);
553 if (!display->queue) {
554 result = VK_ERROR_OUT_OF_HOST_MEMORY;
555 goto fail;
556 }
557
558 display->wl_display_wrapper = wl_proxy_create_wrapper(wl_display);
559 if (!display->wl_display_wrapper) {
560 result = VK_ERROR_OUT_OF_HOST_MEMORY;
561 goto fail;
562 }
563
564 wl_proxy_set_queue((struct wl_proxy *) display->wl_display_wrapper,
565 display->queue);
566
567 struct wl_registry *registry =
568 wl_display_get_registry(display->wl_display_wrapper);
569 if (!registry) {
570 result = VK_ERROR_OUT_OF_HOST_MEMORY;
571 goto fail;
572 }
573
574 wl_registry_add_listener(registry, ®istry_listener, display);
575
576 /* Round-trip to get wl_shm and zwp_linux_dmabuf_v1 globals */
577 wl_display_roundtrip_queue(display->wl_display, display->queue);
578 if (!display->wl_dmabuf && !display->wl_shm) {
579 result = VK_ERROR_SURFACE_LOST_KHR;
580 goto fail_registry;
581 }
582
583 /* Caller doesn't expect us to query formats/modifiers, so return */
584 if (!get_format_list)
585 goto out;
586
587 /* Round-trip again to get formats and modifiers */
588 wl_display_roundtrip_queue(display->wl_display, display->queue);
589
590 if (wsi_wl->wsi->force_bgra8_unorm_first) {
591 /* Find BGRA8_UNORM in the list and swap it to the first position if we
592 * can find it. Some apps get confused if SRGB is first in the list.
593 */
594 struct wsi_wl_format *first_fmt = u_vector_head(&display->formats);
595 struct wsi_wl_format *f, tmp_fmt;
596 f = find_format(&display->formats, VK_FORMAT_B8G8R8A8_UNORM);
597 if (f) {
598 tmp_fmt = *f;
599 *f = *first_fmt;
600 *first_fmt = tmp_fmt;
601 }
602 }
603
604 out:
605 /* We don't need this anymore */
606 wl_registry_destroy(registry);
607
608 display->refcount = 0;
609
610 return VK_SUCCESS;
611
612 fail_registry:
613 if (registry)
614 wl_registry_destroy(registry);
615
616 fail:
617 wsi_wl_display_finish(display);
618 return result;
619 }
620
621 static VkResult
wsi_wl_display_create(struct wsi_wayland * wsi,struct wl_display * wl_display,bool sw,struct wsi_wl_display ** display_out)622 wsi_wl_display_create(struct wsi_wayland *wsi, struct wl_display *wl_display,
623 bool sw,
624 struct wsi_wl_display **display_out)
625 {
626 struct wsi_wl_display *display =
627 vk_alloc(wsi->alloc, sizeof(*display), 8,
628 VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
629 if (!display)
630 return VK_ERROR_OUT_OF_HOST_MEMORY;
631
632 VkResult result = wsi_wl_display_init(wsi, display, wl_display, true,
633 sw);
634 if (result != VK_SUCCESS) {
635 vk_free(wsi->alloc, display);
636 return result;
637 }
638
639 display->refcount++;
640 *display_out = display;
641
642 return result;
643 }
644
645 static struct wsi_wl_display *
wsi_wl_display_ref(struct wsi_wl_display * display)646 wsi_wl_display_ref(struct wsi_wl_display *display)
647 {
648 display->refcount++;
649 return display;
650 }
651
652 static void
wsi_wl_display_unref(struct wsi_wl_display * display)653 wsi_wl_display_unref(struct wsi_wl_display *display)
654 {
655 if (display->refcount-- > 1)
656 return;
657
658 struct wsi_wayland *wsi = display->wsi_wl;
659 wsi_wl_display_finish(display);
660 vk_free(wsi->alloc, display);
661 }
662
/* vkGetPhysicalDeviceWaylandPresentationSupportKHR: report support by
 * probing whether a wsi_wl_display can be initialized against the given
 * wl_display (no format list needed).  queueFamilyIndex is unused —
 * support does not depend on the queue family here. */
VKAPI_ATTR VkBool32 VKAPI_CALL
wsi_GetPhysicalDeviceWaylandPresentationSupportKHR(VkPhysicalDevice physicalDevice,
                                                   uint32_t queueFamilyIndex,
                                                   struct wl_display *wl_display)
{
   VK_FROM_HANDLE(vk_physical_device, pdevice, physicalDevice);
   struct wsi_device *wsi_device = pdevice->wsi_device;
   struct wsi_wayland *wsi =
      (struct wsi_wayland *)wsi_device->wsi[VK_ICD_WSI_PLATFORM_WAYLAND];

   /* Probe with a stack-allocated display; finish it immediately. */
   struct wsi_wl_display display;
   VkResult ret = wsi_wl_display_init(wsi, &display, wl_display, false,
                                      wsi_device->sw);
   if (ret == VK_SUCCESS)
      wsi_wl_display_finish(&display);

   return ret == VK_SUCCESS;
}
681
682 static VkResult
wsi_wl_surface_get_support(VkIcdSurfaceBase * surface,struct wsi_device * wsi_device,uint32_t queueFamilyIndex,VkBool32 * pSupported)683 wsi_wl_surface_get_support(VkIcdSurfaceBase *surface,
684 struct wsi_device *wsi_device,
685 uint32_t queueFamilyIndex,
686 VkBool32* pSupported)
687 {
688 *pSupported = true;
689
690 return VK_SUCCESS;
691 }
692
/* Present modes exposed by the Wayland backend. */
static const VkPresentModeKHR present_modes[] = {
   VK_PRESENT_MODE_MAILBOX_KHR,
   VK_PRESENT_MODE_FIFO_KHR,
};
697
/* vkGetPhysicalDeviceSurfaceCapabilitiesKHR backend: fill in the static
 * capabilities of a Wayland surface.  Wayland has no intrinsic surface
 * size, so currentExtent is the "undefined" (UINT32_MAX, UINT32_MAX)
 * sentinel and the max extent is the device's 2D image limit. */
static VkResult
wsi_wl_surface_get_capabilities(VkIcdSurfaceBase *surface,
                                struct wsi_device *wsi_device,
                                VkSurfaceCapabilitiesKHR* caps)
{
   /* For true mailbox mode, we need at least 4 images:
    * 1) One to scan out from
    * 2) One to have queued for scan-out
    * 3) One to be currently held by the Wayland compositor
    * 4) One to render to
    */
   caps->minImageCount = 4;
   /* There is no real maximum */
   caps->maxImageCount = 0;

   caps->currentExtent = (VkExtent2D) { UINT32_MAX, UINT32_MAX };
   caps->minImageExtent = (VkExtent2D) { 1, 1 };
   caps->maxImageExtent = (VkExtent2D) {
      wsi_device->maxImageDimension2D,
      wsi_device->maxImageDimension2D,
   };

   caps->supportedTransforms = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR;
   caps->currentTransform = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR;
   caps->maxImageArrayLayers = 1;

   caps->supportedCompositeAlpha =
      VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR |
      VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR;

   caps->supportedUsageFlags =
      VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
      VK_IMAGE_USAGE_SAMPLED_BIT |
      VK_IMAGE_USAGE_TRANSFER_DST_BIT |
      VK_IMAGE_USAGE_STORAGE_BIT |
      VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT |
      VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT;

   return VK_SUCCESS;
}
738
/* vkGetPhysicalDeviceSurfaceCapabilities2KHR backend: delegate the core
 * capabilities to wsi_wl_surface_get_capabilities and fill in any
 * recognized extension structs on the pNext chain (currently only
 * VkSurfaceProtectedCapabilitiesKHR: protected content unsupported). */
static VkResult
wsi_wl_surface_get_capabilities2(VkIcdSurfaceBase *surface,
                                 struct wsi_device *wsi_device,
                                 const void *info_next,
                                 VkSurfaceCapabilities2KHR* caps)
{
   assert(caps->sType == VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR);

   VkResult result =
      wsi_wl_surface_get_capabilities(surface, wsi_device,
                                      &caps->surfaceCapabilities);

   vk_foreach_struct(ext, caps->pNext) {
      switch (ext->sType) {
      case VK_STRUCTURE_TYPE_SURFACE_PROTECTED_CAPABILITIES_KHR: {
         VkSurfaceProtectedCapabilitiesKHR *protected = (void *)ext;
         protected->supportsProtected = VK_FALSE;
         break;
      }

      default:
         /* Ignored */
         break;
      }
   }

   return result;
}
767
/* vkGetPhysicalDeviceSurfaceFormatsKHR backend: spin up a temporary
 * display against the surface's wl_display, query its format list, and
 * report every format that the compositor accepts in both alpha and
 * opaque variants (so either composite-alpha mode can be honored). */
static VkResult
wsi_wl_surface_get_formats(VkIcdSurfaceBase *icd_surface,
                           struct wsi_device *wsi_device,
                           uint32_t* pSurfaceFormatCount,
                           VkSurfaceFormatKHR* pSurfaceFormats)
{
   VkIcdSurfaceWayland *surface = (VkIcdSurfaceWayland *)icd_surface;
   struct wsi_wayland *wsi =
      (struct wsi_wayland *)wsi_device->wsi[VK_ICD_WSI_PLATFORM_WAYLAND];

   /* Any non-VK_SUCCESS result (always a negative error here) is treated
    * as surface loss. */
   struct wsi_wl_display display;
   if (wsi_wl_display_init(wsi, &display, surface->display, true,
                           wsi_device->sw))
      return VK_ERROR_SURFACE_LOST_KHR;

   VK_OUTARRAY_MAKE_TYPED(VkSurfaceFormatKHR, out,
                          pSurfaceFormats, pSurfaceFormatCount);

   struct wsi_wl_format *disp_fmt;
   u_vector_foreach(disp_fmt, &display.formats) {
      /* Skip formats for which we can't support both alpha & opaque
       * formats.
       */
      if (!(disp_fmt->flags & WSI_WL_FMT_ALPHA) ||
          !(disp_fmt->flags & WSI_WL_FMT_OPAQUE))
         continue;

      vk_outarray_append_typed(VkSurfaceFormatKHR, &out, out_fmt) {
         out_fmt->format = disp_fmt->vk_format;
         out_fmt->colorSpace = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR;
      }
   }

   wsi_wl_display_finish(&display);

   return vk_outarray_status(&out);
}
805
/* vkGetPhysicalDeviceSurfaceFormats2KHR backend: identical logic to
 * wsi_wl_surface_get_formats but writing VkSurfaceFormat2KHR entries. */
static VkResult
wsi_wl_surface_get_formats2(VkIcdSurfaceBase *icd_surface,
                            struct wsi_device *wsi_device,
                            const void *info_next,
                            uint32_t* pSurfaceFormatCount,
                            VkSurfaceFormat2KHR* pSurfaceFormats)
{
   VkIcdSurfaceWayland *surface = (VkIcdSurfaceWayland *)icd_surface;
   struct wsi_wayland *wsi =
      (struct wsi_wayland *)wsi_device->wsi[VK_ICD_WSI_PLATFORM_WAYLAND];

   /* Any non-VK_SUCCESS result (always a negative error here) is treated
    * as surface loss. */
   struct wsi_wl_display display;
   if (wsi_wl_display_init(wsi, &display, surface->display, true,
                           wsi_device->sw))
      return VK_ERROR_SURFACE_LOST_KHR;

   VK_OUTARRAY_MAKE_TYPED(VkSurfaceFormat2KHR, out,
                          pSurfaceFormats, pSurfaceFormatCount);

   struct wsi_wl_format *disp_fmt;
   u_vector_foreach(disp_fmt, &display.formats) {
      /* Skip formats for which we can't support both alpha & opaque
       * formats.
       */
      if (!(disp_fmt->flags & WSI_WL_FMT_ALPHA) ||
          !(disp_fmt->flags & WSI_WL_FMT_OPAQUE))
         continue;

      vk_outarray_append_typed(VkSurfaceFormat2KHR, &out, out_fmt) {
         out_fmt->surfaceFormat.format = disp_fmt->vk_format;
         out_fmt->surfaceFormat.colorSpace = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR;
      }
   }

   wsi_wl_display_finish(&display);

   return vk_outarray_status(&out);
}
844
/* vkGetPhysicalDeviceSurfacePresentModesKHR backend: standard two-call
 * enumeration over the static present_modes table.  With a NULL output
 * array just report the count; otherwise copy as many as fit and return
 * VK_INCOMPLETE when the caller's buffer was too small. */
static VkResult
wsi_wl_surface_get_present_modes(VkIcdSurfaceBase *surface,
                                 uint32_t* pPresentModeCount,
                                 VkPresentModeKHR* pPresentModes)
{
   if (pPresentModes == NULL) {
      *pPresentModeCount = ARRAY_SIZE(present_modes);
      return VK_SUCCESS;
   }

   *pPresentModeCount = MIN2(*pPresentModeCount, ARRAY_SIZE(present_modes));
   typed_memcpy(pPresentModes, present_modes, *pPresentModeCount);

   if (*pPresentModeCount < ARRAY_SIZE(present_modes))
      return VK_INCOMPLETE;
   else
      return VK_SUCCESS;
}
863
/* vkGetPhysicalDevicePresentRectanglesKHR backend: Wayland surfaces have
 * no fixed size, so return a single rectangle with the "unknown extent"
 * sentinel. */
static VkResult
wsi_wl_surface_get_present_rectangles(VkIcdSurfaceBase *surface,
                                      struct wsi_device *wsi_device,
                                      uint32_t* pRectCount,
                                      VkRect2D* pRects)
{
   VK_OUTARRAY_MAKE_TYPED(VkRect2D, out, pRects, pRectCount);

   vk_outarray_append_typed(VkRect2D, &out, rect) {
      /* We don't know a size so just return the usual "I don't know." */
      *rect = (VkRect2D) {
         .offset = { 0, 0 },
         .extent = { UINT32_MAX, UINT32_MAX },
      };
   }

   return vk_outarray_status(&out);
}
882
/* vkCreateWaylandSurfaceKHR: allocate a VkIcdSurfaceWayland recording the
 * application's wl_display and wl_surface.  The surface borrows those
 * pointers — the application keeps ownership of the Wayland objects. */
VKAPI_ATTR VkResult VKAPI_CALL
wsi_CreateWaylandSurfaceKHR(VkInstance _instance,
                            const VkWaylandSurfaceCreateInfoKHR *pCreateInfo,
                            const VkAllocationCallbacks *pAllocator,
                            VkSurfaceKHR *pSurface)
{
   VK_FROM_HANDLE(vk_instance, instance, _instance);
   VkIcdSurfaceWayland *surface;

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR);

   surface = vk_alloc2(&instance->alloc, pAllocator, sizeof *surface, 8,
                       VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (surface == NULL)
      return VK_ERROR_OUT_OF_HOST_MEMORY;

   surface->base.platform = VK_ICD_WSI_PLATFORM_WAYLAND;
   surface->display = pCreateInfo->display;
   surface->surface = pCreateInfo->surface;

   *pSurface = VkIcdSurfaceBase_to_handle(&surface->base);

   return VK_SUCCESS;
}
907
/* One swapchain image and its Wayland-side state.  The shm_* members are
 * only meaningful for the wl_shm buffer types. */
struct wsi_wl_image {
   struct wsi_image base;
   struct wl_buffer *buffer;
   bool busy;          /* held by the compositor / not yet releasable */
   int shm_fd;
   void *shm_ptr;
   unsigned shm_size;
};
916
/* Strategy used to get swapchain pixels to the compositor.
 * NOTE(review): names suggest NATIVE = dma-buf, the other two are wl_shm
 * variants (GPU writes into the pool vs. a CPU memcpy) — confirm against
 * the swapchain creation code. */
enum wsi_wl_buffer_type {
   WSI_WL_BUFFER_NATIVE,
   WSI_WL_BUFFER_GPU_SHM,
   WSI_WL_BUFFER_SHM_MEMCPY,
};
922
923 struct wsi_wl_swapchain {
924 struct wsi_swapchain base;
925
926 struct wsi_wl_display *display;
927
928 struct wl_surface * surface;
929
930 struct wl_callback * frame;
931
932 VkExtent2D extent;
933 VkFormat vk_format;
934 enum wsi_wl_buffer_type buffer_type;
935 uint32_t drm_format;
936 enum wl_shm_format shm_format;
937
938 uint32_t num_drm_modifiers;
939 const uint64_t * drm_modifiers;
940
941 VkPresentModeKHR present_mode;
942 bool fifo_ready;
943
944 struct wsi_wl_image images[0];
945 };
946 VK_DEFINE_NONDISP_HANDLE_CASTS(wsi_wl_swapchain, base.base, VkSwapchainKHR,
947 VK_OBJECT_TYPE_SWAPCHAIN_KHR)
948
949 static struct wsi_image *
wsi_wl_swapchain_get_wsi_image(struct wsi_swapchain * wsi_chain,uint32_t image_index)950 wsi_wl_swapchain_get_wsi_image(struct wsi_swapchain *wsi_chain,
951 uint32_t image_index)
952 {
953 struct wsi_wl_swapchain *chain = (struct wsi_wl_swapchain *)wsi_chain;
954 return &chain->images[image_index].base;
955 }
956
957 static VkResult
wsi_wl_swapchain_acquire_next_image(struct wsi_swapchain * wsi_chain,const VkAcquireNextImageInfoKHR * info,uint32_t * image_index)958 wsi_wl_swapchain_acquire_next_image(struct wsi_swapchain *wsi_chain,
959 const VkAcquireNextImageInfoKHR *info,
960 uint32_t *image_index)
961 {
962 struct wsi_wl_swapchain *chain = (struct wsi_wl_swapchain *)wsi_chain;
963 struct timespec start_time, end_time;
964 struct timespec rel_timeout;
965 int wl_fd = wl_display_get_fd(chain->display->wl_display);
966
967 timespec_from_nsec(&rel_timeout, info->timeout);
968
969 clock_gettime(CLOCK_MONOTONIC, &start_time);
970 timespec_add(&end_time, &rel_timeout, &start_time);
971
972 while (1) {
973 /* Try to dispatch potential events. */
974 int ret = wl_display_dispatch_queue_pending(chain->display->wl_display,
975 chain->display->queue);
976 if (ret < 0)
977 return VK_ERROR_OUT_OF_DATE_KHR;
978
979 /* Try to find a free image. */
980 for (uint32_t i = 0; i < chain->base.image_count; i++) {
981 if (!chain->images[i].busy) {
982 /* We found a non-busy image */
983 *image_index = i;
984 chain->images[i].busy = true;
985 return VK_SUCCESS;
986 }
987 }
988
989 /* Check for timeout. */
990 struct timespec current_time;
991 clock_gettime(CLOCK_MONOTONIC, ¤t_time);
992 if (timespec_after(¤t_time, &end_time))
993 return VK_NOT_READY;
994
995 /* Try to read events from the server. */
996 ret = wl_display_prepare_read_queue(chain->display->wl_display,
997 chain->display->queue);
998 if (ret < 0) {
999 /* Another thread might have read events for our queue already. Go
1000 * back to dispatch them.
1001 */
1002 if (errno == EAGAIN)
1003 continue;
1004 return VK_ERROR_OUT_OF_DATE_KHR;
1005 }
1006
1007 struct pollfd pollfd = {
1008 .fd = wl_fd,
1009 .events = POLLIN
1010 };
1011 timespec_sub(&rel_timeout, &end_time, ¤t_time);
1012 ret = ppoll(&pollfd, 1, &rel_timeout, NULL);
1013 if (ret <= 0) {
1014 int lerrno = errno;
1015 wl_display_cancel_read(chain->display->wl_display);
1016 if (ret < 0) {
1017 /* If ppoll() was interrupted, try again. */
1018 if (lerrno == EINTR || lerrno == EAGAIN)
1019 continue;
1020 return VK_ERROR_OUT_OF_DATE_KHR;
1021 }
1022 assert(ret == 0);
1023 continue;
1024 }
1025
1026 ret = wl_display_read_events(chain->display->wl_display);
1027 if (ret < 0)
1028 return VK_ERROR_OUT_OF_DATE_KHR;
1029 }
1030 }
1031
1032 static void
frame_handle_done(void * data,struct wl_callback * callback,uint32_t serial)1033 frame_handle_done(void *data, struct wl_callback *callback, uint32_t serial)
1034 {
1035 struct wsi_wl_swapchain *chain = data;
1036
1037 chain->frame = NULL;
1038 chain->fifo_ready = true;
1039
1040 wl_callback_destroy(callback);
1041 }
1042
1043 static const struct wl_callback_listener frame_listener = {
1044 frame_handle_done,
1045 };
1046
static VkResult
wsi_wl_swapchain_queue_present(struct wsi_swapchain *wsi_chain,
                               uint32_t image_index,
                               const VkPresentRegionKHR *damage)
{
   struct wsi_wl_swapchain *chain = (struct wsi_wl_swapchain *)wsi_chain;

   /* For the memcpy SHM path, copy the rendered pixels into the mapping
    * shared with the compositor before attaching the buffer.
    */
   if (chain->buffer_type == WSI_WL_BUFFER_SHM_MEMCPY) {
      struct wsi_wl_image *image = &chain->images[image_index];
      memcpy(image->shm_ptr, image->base.cpu_map,
             image->base.row_pitches[0] * chain->extent.height);
   }
   /* FIFO throttling: block until frame_handle_done() has fired for the
    * previously committed frame.
    */
   if (chain->base.present_mode == VK_PRESENT_MODE_FIFO_KHR) {
      while (!chain->fifo_ready) {
         int ret = wl_display_dispatch_queue(chain->display->wl_display,
                                             chain->display->queue);
         if (ret < 0)
            return VK_ERROR_OUT_OF_DATE_KHR;
      }
   }

   assert(image_index < chain->base.image_count);
   wl_surface_attach(chain->surface, chain->images[image_index].buffer, 0, 0);

   /* wl_surface.damage_buffer needs wl_surface version 4; otherwise fall
    * back to damaging the whole surface.
    */
   if (wl_surface_get_version(chain->surface) >= 4 && damage &&
       damage->pRectangles && damage->rectangleCount > 0) {
      for (unsigned i = 0; i < damage->rectangleCount; i++) {
         const VkRectLayerKHR *rect = &damage->pRectangles[i];
         assert(rect->layer == 0);  /* per-layer damage is not supported */
         wl_surface_damage_buffer(chain->surface,
                                  rect->offset.x, rect->offset.y,
                                  rect->extent.width, rect->extent.height);
      }
   } else {
      wl_surface_damage(chain->surface, 0, 0, INT32_MAX, INT32_MAX);
   }

   /* Arm the frame callback before the commit so fifo_ready flips once this
    * frame has been handled by the compositor.
    */
   if (chain->base.present_mode == VK_PRESENT_MODE_FIFO_KHR) {
      chain->frame = wl_surface_frame(chain->surface);
      wl_callback_add_listener(chain->frame, &frame_listener, chain);
      chain->fifo_ready = false;
   }

   /* Mark busy before commit; buffer_handle_release() clears it. */
   chain->images[image_index].busy = true;
   wl_surface_commit(chain->surface);
   wl_display_flush(chain->display->wl_display);

   return VK_SUCCESS;
}
1096
1097 static void
buffer_handle_release(void * data,struct wl_buffer * buffer)1098 buffer_handle_release(void *data, struct wl_buffer *buffer)
1099 {
1100 struct wsi_wl_image *image = data;
1101
1102 assert(image->buffer == buffer);
1103
1104 image->busy = false;
1105 }
1106
1107 static const struct wl_buffer_listener buffer_listener = {
1108 buffer_handle_release,
1109 };
1110
1111 static uint8_t *
wsi_wl_alloc_image_shm(struct wsi_image * imagew,unsigned size)1112 wsi_wl_alloc_image_shm(struct wsi_image *imagew, unsigned size)
1113 {
1114 struct wsi_wl_image *image = (struct wsi_wl_image *)imagew;
1115
1116 /* Create a shareable buffer */
1117 int fd = os_create_anonymous_file(size, NULL);
1118 if (fd < 0)
1119 return NULL;
1120
1121 void *ptr = mmap(NULL, size, PROT_READ | PROT_WRITE, MAP_SHARED, fd, 0);
1122 if (ptr == MAP_FAILED) {
1123 close(fd);
1124 return NULL;
1125 }
1126
1127 image->shm_fd = fd;
1128 image->shm_ptr = ptr;
1129 image->shm_size = size;
1130
1131 return ptr;
1132 }
1133
1134 static VkResult
wsi_wl_image_init(struct wsi_wl_swapchain * chain,struct wsi_wl_image * image,const VkSwapchainCreateInfoKHR * pCreateInfo,const VkAllocationCallbacks * pAllocator)1135 wsi_wl_image_init(struct wsi_wl_swapchain *chain,
1136 struct wsi_wl_image *image,
1137 const VkSwapchainCreateInfoKHR *pCreateInfo,
1138 const VkAllocationCallbacks* pAllocator)
1139 {
1140 struct wsi_wl_display *display = chain->display;
1141 VkResult result;
1142
1143 result = wsi_create_image(&chain->base, &chain->base.image_info,
1144 &image->base);
1145 if (result != VK_SUCCESS)
1146 return result;
1147
1148 switch (chain->buffer_type) {
1149 case WSI_WL_BUFFER_GPU_SHM:
1150 case WSI_WL_BUFFER_SHM_MEMCPY: {
1151 if (chain->buffer_type == WSI_WL_BUFFER_SHM_MEMCPY) {
1152 wsi_wl_alloc_image_shm(&image->base, image->base.row_pitches[0] *
1153 chain->extent.height);
1154 }
1155 assert(image->shm_ptr != NULL);
1156
1157 /* Share it in a wl_buffer */
1158 struct wl_shm_pool *pool = wl_shm_create_pool(display->wl_shm,
1159 image->shm_fd,
1160 image->shm_size);
1161 wl_proxy_set_queue((struct wl_proxy *)pool, display->queue);
1162 image->buffer = wl_shm_pool_create_buffer(pool, 0, chain->extent.width,
1163 chain->extent.height,
1164 image->base.row_pitches[0],
1165 chain->shm_format);
1166 wl_shm_pool_destroy(pool);
1167 break;
1168 }
1169
1170 case WSI_WL_BUFFER_NATIVE: {
1171 assert(display->wl_dmabuf);
1172
1173 struct zwp_linux_buffer_params_v1 *params =
1174 zwp_linux_dmabuf_v1_create_params(display->wl_dmabuf);
1175 if (!params)
1176 goto fail_image;
1177
1178 for (int i = 0; i < image->base.num_planes; i++) {
1179 zwp_linux_buffer_params_v1_add(params,
1180 image->base.dma_buf_fd,
1181 i,
1182 image->base.offsets[i],
1183 image->base.row_pitches[i],
1184 image->base.drm_modifier >> 32,
1185 image->base.drm_modifier & 0xffffffff);
1186 }
1187
1188 image->buffer =
1189 zwp_linux_buffer_params_v1_create_immed(params,
1190 chain->extent.width,
1191 chain->extent.height,
1192 chain->drm_format,
1193 0);
1194 zwp_linux_buffer_params_v1_destroy(params);
1195 break;
1196 }
1197
1198 default:
1199 unreachable("Invalid buffer type");
1200 }
1201
1202 if (!image->buffer)
1203 goto fail_image;
1204
1205 wl_buffer_add_listener(image->buffer, &buffer_listener, image);
1206
1207 return VK_SUCCESS;
1208
1209 fail_image:
1210 wsi_destroy_image(&chain->base, &image->base);
1211
1212 return VK_ERROR_OUT_OF_HOST_MEMORY;
1213 }
1214
1215 static void
wsi_wl_swapchain_images_free(struct wsi_wl_swapchain * chain)1216 wsi_wl_swapchain_images_free(struct wsi_wl_swapchain *chain)
1217 {
1218 for (uint32_t i = 0; i < chain->base.image_count; i++) {
1219 if (chain->images[i].buffer) {
1220 wl_buffer_destroy(chain->images[i].buffer);
1221 wsi_destroy_image(&chain->base, &chain->images[i].base);
1222 if (chain->images[i].shm_size) {
1223 close(chain->images[i].shm_fd);
1224 munmap(chain->images[i].shm_ptr, chain->images[i].shm_size);
1225 }
1226 }
1227 }
1228 wsi_destroy_image_info(&chain->base, &chain->base.image_info);
1229 }
1230
1231 static void
wsi_wl_swapchain_chain_free(struct wsi_wl_swapchain * chain,const VkAllocationCallbacks * pAllocator)1232 wsi_wl_swapchain_chain_free(struct wsi_wl_swapchain *chain,
1233 const VkAllocationCallbacks *pAllocator)
1234 {
1235 if (chain->frame)
1236 wl_callback_destroy(chain->frame);
1237 if (chain->surface)
1238 wl_proxy_wrapper_destroy(chain->surface);
1239
1240 if (chain->display)
1241 wsi_wl_display_unref(chain->display);
1242
1243 wsi_swapchain_finish(&chain->base);
1244
1245 vk_free(pAllocator, chain);
1246 }
1247
1248 static VkResult
wsi_wl_swapchain_destroy(struct wsi_swapchain * wsi_chain,const VkAllocationCallbacks * pAllocator)1249 wsi_wl_swapchain_destroy(struct wsi_swapchain *wsi_chain,
1250 const VkAllocationCallbacks *pAllocator)
1251 {
1252 struct wsi_wl_swapchain *chain = (struct wsi_wl_swapchain *)wsi_chain;
1253
1254 wsi_wl_swapchain_images_free(chain);
1255 wsi_wl_swapchain_chain_free(chain, pAllocator);
1256
1257 return VK_SUCCESS;
1258 }
1259
1260 static VkResult
wsi_wl_surface_create_swapchain(VkIcdSurfaceBase * icd_surface,VkDevice device,struct wsi_device * wsi_device,const VkSwapchainCreateInfoKHR * pCreateInfo,const VkAllocationCallbacks * pAllocator,struct wsi_swapchain ** swapchain_out)1261 wsi_wl_surface_create_swapchain(VkIcdSurfaceBase *icd_surface,
1262 VkDevice device,
1263 struct wsi_device *wsi_device,
1264 const VkSwapchainCreateInfoKHR* pCreateInfo,
1265 const VkAllocationCallbacks* pAllocator,
1266 struct wsi_swapchain **swapchain_out)
1267 {
1268 VkIcdSurfaceWayland *surface = (VkIcdSurfaceWayland *)icd_surface;
1269 struct wsi_wayland *wsi =
1270 (struct wsi_wayland *)wsi_device->wsi[VK_ICD_WSI_PLATFORM_WAYLAND];
1271 struct wsi_wl_swapchain *chain;
1272 VkResult result;
1273
1274 assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR);
1275
1276 int num_images = pCreateInfo->minImageCount;
1277
1278 size_t size = sizeof(*chain) + num_images * sizeof(chain->images[0]);
1279 chain = vk_zalloc(pAllocator, size, 8, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
1280 if (chain == NULL)
1281 return VK_ERROR_OUT_OF_HOST_MEMORY;
1282
1283 result = wsi_swapchain_init(wsi_device, &chain->base, device,
1284 pCreateInfo, pAllocator, false);
1285 if (result != VK_SUCCESS) {
1286 vk_free(pAllocator, chain);
1287 return result;
1288 }
1289
1290 bool alpha = pCreateInfo->compositeAlpha ==
1291 VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR;
1292
1293 chain->base.destroy = wsi_wl_swapchain_destroy;
1294 chain->base.get_wsi_image = wsi_wl_swapchain_get_wsi_image;
1295 chain->base.acquire_next_image = wsi_wl_swapchain_acquire_next_image;
1296 chain->base.queue_present = wsi_wl_swapchain_queue_present;
1297 chain->base.present_mode = wsi_swapchain_get_present_mode(wsi_device, pCreateInfo);
1298 chain->base.image_count = num_images;
1299 chain->extent = pCreateInfo->imageExtent;
1300 chain->vk_format = pCreateInfo->imageFormat;
1301 if (wsi_device->sw) {
1302 chain->buffer_type = (chain->base.wsi->has_import_memory_host &&
1303 !(WSI_DEBUG & WSI_DEBUG_NOSHM)) ?
1304 WSI_WL_BUFFER_GPU_SHM : WSI_WL_BUFFER_SHM_MEMCPY;
1305 chain->shm_format = wl_shm_format_for_vk_format(chain->vk_format, alpha);
1306 } else {
1307 chain->buffer_type = WSI_WL_BUFFER_NATIVE;
1308 chain->drm_format = wl_drm_format_for_vk_format(chain->vk_format, alpha);
1309 }
1310
1311 if (pCreateInfo->oldSwapchain) {
1312 /* If we have an oldSwapchain parameter, copy the display struct over
1313 * from the old one so we don't have to fully re-initialize it.
1314 */
1315 VK_FROM_HANDLE(wsi_wl_swapchain, old_chain, pCreateInfo->oldSwapchain);
1316 chain->display = wsi_wl_display_ref(old_chain->display);
1317 } else {
1318 chain->display = NULL;
1319 result = wsi_wl_display_create(wsi, surface->display,
1320 wsi_device->sw, &chain->display);
1321 if (result != VK_SUCCESS)
1322 goto fail;
1323 }
1324
1325 chain->surface = wl_proxy_create_wrapper(surface->surface);
1326 if (!chain->surface) {
1327 result = VK_ERROR_OUT_OF_HOST_MEMORY;
1328 goto fail;
1329 }
1330 wl_proxy_set_queue((struct wl_proxy *) chain->surface,
1331 chain->display->queue);
1332
1333 chain->num_drm_modifiers = 0;
1334 chain->drm_modifiers = 0;
1335
1336 /* Use explicit DRM format modifiers when both the server and the driver
1337 * support them.
1338 */
1339 if (chain->display->wl_dmabuf && chain->base.wsi->supports_modifiers) {
1340 struct wsi_wl_format *f = find_format(&chain->display->formats, chain->vk_format);
1341 if (f) {
1342 chain->drm_modifiers = u_vector_tail(&f->modifiers);
1343 chain->num_drm_modifiers = u_vector_length(&f->modifiers);
1344 }
1345 }
1346
1347 chain->fifo_ready = true;
1348
1349 switch (chain->buffer_type) {
1350 case WSI_WL_BUFFER_NATIVE:
1351 result = wsi_configure_native_image(&chain->base, pCreateInfo,
1352 chain->num_drm_modifiers > 0 ? 1 : 0,
1353 &chain->num_drm_modifiers,
1354 &chain->drm_modifiers,
1355 &chain->base.image_info);
1356 break;
1357
1358 case WSI_WL_BUFFER_GPU_SHM:
1359 result = wsi_configure_cpu_image(&chain->base, pCreateInfo,
1360 wsi_wl_alloc_image_shm,
1361 &chain->base.image_info);
1362 break;
1363
1364 case WSI_WL_BUFFER_SHM_MEMCPY:
1365 result = wsi_configure_cpu_image(&chain->base, pCreateInfo,
1366 NULL, &chain->base.image_info);
1367 break;
1368
1369 default:
1370 unreachable("Invalid buffer type");
1371 }
1372 if (result != VK_SUCCESS)
1373 goto fail;
1374
1375 for (uint32_t i = 0; i < chain->base.image_count; i++) {
1376 result = wsi_wl_image_init(chain, &chain->images[i],
1377 pCreateInfo, pAllocator);
1378 if (result != VK_SUCCESS)
1379 goto fail_image_init;
1380 chain->images[i].busy = false;
1381 }
1382
1383 *swapchain_out = &chain->base;
1384
1385 return VK_SUCCESS;
1386
1387 fail_image_init:
1388 wsi_wl_swapchain_images_free(chain);
1389
1390 fail:
1391 wsi_wl_swapchain_chain_free(chain, pAllocator);
1392
1393 return result;
1394 }
1395
1396 VkResult
wsi_wl_init_wsi(struct wsi_device * wsi_device,const VkAllocationCallbacks * alloc,VkPhysicalDevice physical_device)1397 wsi_wl_init_wsi(struct wsi_device *wsi_device,
1398 const VkAllocationCallbacks *alloc,
1399 VkPhysicalDevice physical_device)
1400 {
1401 struct wsi_wayland *wsi;
1402 VkResult result;
1403
1404 wsi = vk_alloc(alloc, sizeof(*wsi), 8,
1405 VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
1406 if (!wsi) {
1407 result = VK_ERROR_OUT_OF_HOST_MEMORY;
1408 goto fail;
1409 }
1410
1411 wsi->physical_device = physical_device;
1412 wsi->alloc = alloc;
1413 wsi->wsi = wsi_device;
1414
1415 wsi->base.get_support = wsi_wl_surface_get_support;
1416 wsi->base.get_capabilities2 = wsi_wl_surface_get_capabilities2;
1417 wsi->base.get_formats = wsi_wl_surface_get_formats;
1418 wsi->base.get_formats2 = wsi_wl_surface_get_formats2;
1419 wsi->base.get_present_modes = wsi_wl_surface_get_present_modes;
1420 wsi->base.get_present_rectangles = wsi_wl_surface_get_present_rectangles;
1421 wsi->base.create_swapchain = wsi_wl_surface_create_swapchain;
1422
1423 wsi_device->wsi[VK_ICD_WSI_PLATFORM_WAYLAND] = &wsi->base;
1424
1425 return VK_SUCCESS;
1426
1427 fail:
1428 wsi_device->wsi[VK_ICD_WSI_PLATFORM_WAYLAND] = NULL;
1429
1430 return result;
1431 }
1432
1433 void
wsi_wl_finish_wsi(struct wsi_device * wsi_device,const VkAllocationCallbacks * alloc)1434 wsi_wl_finish_wsi(struct wsi_device *wsi_device,
1435 const VkAllocationCallbacks *alloc)
1436 {
1437 struct wsi_wayland *wsi =
1438 (struct wsi_wayland *)wsi_device->wsi[VK_ICD_WSI_PLATFORM_WAYLAND];
1439 if (!wsi)
1440 return;
1441
1442 vk_free(alloc, wsi);
1443 }
1444