/**************************************************************************
 *
 * Copyright (C) 2014 Red Hat Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included
 * in all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 **************************************************************************/
/* Create our own EGL offscreen rendering context via GBM and render nodes. */


/* If we are using EGL and render nodes, we talk to the remote node via file
 * descriptors. */
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif

#define EGL_EGLEXT_PROTOTYPES
#include <errno.h>
#include <fcntl.h>
#include <poll.h>
#include <stdbool.h>
#include <unistd.h>
#include <xf86drm.h>

#include "util/u_memory.h"

#include "virglrenderer.h"
#include "vrend_winsys.h"
#include "vrend_winsys_egl.h"
#include "virgl_hw.h"
#include "vrend_winsys_gbm.h"
#include "virgl_util.h"

#define EGL_KHR_SURFACELESS_CONTEXT BIT(0)
#define EGL_KHR_CREATE_CONTEXT BIT(1)
#define EGL_MESA_DRM_IMAGE BIT(2)
#define EGL_MESA_IMAGE_DMA_BUF_EXPORT BIT(3)
#define EGL_MESA_DMA_BUF_IMAGE_IMPORT BIT(4)
#define EGL_KHR_GL_COLORSPACE BIT(5)
#define EGL_EXT_IMAGE_DMA_BUF_IMPORT BIT(6)
#define EGL_EXT_IMAGE_DMA_BUF_IMPORT_MODIFIERS BIT(7)
#define EGL_KHR_FENCE_SYNC_ANDROID BIT(8)

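/* Each entry maps one of the local feature bits above to the EGL extension
 * string that advertises it; virgl_egl_init_extensions() scans the display's
 * extension string and records the matching bits in virgl_egl::extension_bits. */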
static const struct {
   uint32_t bit;
   const char *string;
} extensions_list[] = {
   { EGL_KHR_SURFACELESS_CONTEXT, "EGL_KHR_surfaceless_context" },
   { EGL_KHR_CREATE_CONTEXT, "EGL_KHR_create_context" },
   { EGL_MESA_DRM_IMAGE, "EGL_MESA_drm_image" },
   { EGL_MESA_IMAGE_DMA_BUF_EXPORT, "EGL_MESA_image_dma_buf_export" },
   { EGL_KHR_GL_COLORSPACE, "EGL_KHR_gl_colorspace" },
   { EGL_EXT_IMAGE_DMA_BUF_IMPORT, "EGL_EXT_image_dma_buf_import" },
   { EGL_EXT_IMAGE_DMA_BUF_IMPORT_MODIFIERS, "EGL_EXT_image_dma_buf_import_modifiers" },
   { EGL_KHR_FENCE_SYNC_ANDROID, "EGL_ANDROID_native_fence_sync" }
};

struct virgl_egl {
   struct virgl_gbm *gbm;
   EGLDisplay egl_display;
   EGLConfig egl_conf;
   EGLContext egl_ctx;
   uint32_t extension_bits;
   EGLSyncKHR signaled_fence;
   bool different_gpu;
};

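/* Return true if |needle| appears as a complete, space-delimited token in the
 * extension string |haystack|. A plain strstr() is not enough because one
 * extension name can be a prefix of another. */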
static bool virgl_egl_has_extension_in_string(const char *haystack, const char *needle)
{
   const unsigned needle_len = strlen(needle);

   if (!haystack)
      return false;

   if (needle_len == 0)
      return false;

   while (true) {
      const char *const s = strstr(haystack, needle);

      if (s == NULL)
         return false;

      if (s[needle_len] == ' ' || s[needle_len] == '\0') {
         return true;
      }

      /* strstr found an extension whose name begins with
       * needle, but whose name is not equal to needle.
       * Restart the search at s + needle_len so that we
       * don't just find the same extension again and go
       * into an infinite loop.
       */
      haystack = s + needle_len;
   }

   return false;
}

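/* Record which of the extensions in extensions_list[] the display supports.
 * Fails if the required EGL_KHR_surfaceless_context or EGL_KHR_create_context
 * extensions are missing. */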
static int virgl_egl_init_extensions(struct virgl_egl *egl, const char *extensions)
{
   for (uint32_t i = 0; i < ARRAY_SIZE(extensions_list); i++) {
      if (virgl_egl_has_extension_in_string(extensions, extensions_list[i].string))
         egl->extension_bits |= extensions_list[i].bit;
   }

   if (!has_bits(egl->extension_bits, EGL_KHR_SURFACELESS_CONTEXT | EGL_KHR_CREATE_CONTEXT)) {
      vrend_printf( "Missing EGL_KHR_surfaceless_context or EGL_KHR_create_context\n");
      return -1;
   }

   return 0;
}

#ifdef ENABLE_MINIGBM_ALLOCATION

struct egl_funcs {
   PFNEGLGETPLATFORMDISPLAYEXTPROC eglGetPlatformDisplay;
   PFNEGLQUERYDEVICESEXTPROC eglQueryDevices;
   PFNEGLQUERYDEVICESTRINGEXTPROC eglQueryDeviceString;
};

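/* Look up the EGL client-extension entry points needed for explicit device
 * selection. Returns false if EGL_EXT_platform_base, EGL_EXT_platform_device,
 * EGL_EXT_device_enumeration or EGL_EXT_device_query is unavailable. */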
static bool virgl_egl_get_interface(struct egl_funcs *funcs)
{
   const char *client_extensions = eglQueryString (NULL, EGL_EXTENSIONS);

   assert(funcs);

   if (virgl_egl_has_extension_in_string(client_extensions, "EGL_EXT_platform_base")) {
      funcs->eglGetPlatformDisplay =
         (PFNEGLGETPLATFORMDISPLAYEXTPROC) eglGetProcAddress ("eglGetPlatformDisplayEXT");
   }

   if (!funcs->eglGetPlatformDisplay)
      return false;

   if (!virgl_egl_has_extension_in_string(client_extensions, "EGL_EXT_platform_device"))
      return false;

   if (!virgl_egl_has_extension_in_string(client_extensions, "EGL_EXT_device_enumeration"))
      return false;

   funcs->eglQueryDevices = (PFNEGLQUERYDEVICESEXTPROC)eglGetProcAddress ("eglQueryDevicesEXT");
   if (!funcs->eglQueryDevices)
      return false;

   if (!virgl_egl_has_extension_in_string(client_extensions, "EGL_EXT_device_query"))
      return false;

   funcs->eglQueryDeviceString = (PFNEGLQUERYDEVICESTRINGEXTPROC)eglGetProcAddress("eglQueryDeviceStringEXT");
   if (!funcs->eglQueryDeviceString)
      return false;

   return true;
}

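/* Helpers for virgl_egl_get_device() below: find the first device whose type
 * flags include |flags| and that can do 3D rendering, or the device whose DRI
 * node number matches, respectively. Both return -1 when no device qualifies. */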
static EGLint virgl_egl_find_3d_device(struct gbm_device_info *dev_infos, EGLint num_devices, uint32_t flags)
{
   EGLint d;

   for (d = 0; d < num_devices; d++) {
      if ((dev_infos[d].dev_type_flags & flags) == flags
          && dev_infos[d].dev_type_flags & GBM_DEV_TYPE_FLAG_3D)
         return d;
   }

   return -1;
}

static EGLint virgl_egl_find_matching_device(struct gbm_device_info *dev_infos, EGLint num_devices, int dri_node_num)
{
   EGLint d;

   for (d = 0; d < num_devices; d++) {
      if (dev_infos[d].dri_node_num == dri_node_num)
         return d;
   }

   return -1;
}

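/* Choose the EGLDeviceEXT to render on. Preference order: a discrete GPU when
 * VIRGL_PREFER_DGPU is set in the environment, the 3D device on ARM SoC
 * systems, and otherwise the device backing our GBM file descriptor. When a
 * discrete GPU is selected, egl->different_gpu records whether it differs
 * from the GBM device. */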
static EGLDeviceEXT virgl_egl_get_device(struct virgl_egl *egl, struct egl_funcs *funcs) {
   EGLint num_devices = 0;
   EGLint max_devices = 64;
   EGLDeviceEXT devices[64];
   struct gbm_device_info dev_infos[64];
   struct gbm_device_info gbm_dev_info;
   EGLint device_num = -1;
   EGLint d;

   if (gbm_detect_device_info(0, gbm_device_get_fd(egl->gbm->device), &gbm_dev_info) < 0)
      return EGL_NO_DEVICE_EXT;

   if (!funcs->eglQueryDevices(max_devices, devices, &num_devices))
      return EGL_NO_DEVICE_EXT;

   /* We query EGL_DRM_DEVICE_FILE_EXT without checking for the EGL_EXT_device_drm
    * extension; we simply get NULL when it is not available. Otherwise we would
    * have to query it after initializing a display for every device.
    */
   for (d = 0; d < num_devices; d++) {
      const char *dev_node = funcs->eglQueryDeviceString(devices[d], EGL_DRM_DEVICE_FILE_EXT);
      memset(&dev_infos[d], 0, sizeof(dev_infos[d]));
      if (dev_node) {
         if (gbm_detect_device_info_path(0, dev_node, dev_infos + d) < 0)
            return EGL_NO_DEVICE_EXT;
      } else {
         dev_infos[d].dri_node_num = -1;
      }
   }

   if (getenv("VIRGL_PREFER_DGPU"))
      /* Find a discrete GPU. */
      device_num = virgl_egl_find_3d_device(dev_infos, num_devices, GBM_DEV_TYPE_FLAG_DISCRETE);

   if (device_num >= 0) {
      egl->different_gpu = dev_infos[device_num].dri_node_num != gbm_dev_info.dri_node_num;
   } else if (gbm_dev_info.dev_type_flags & GBM_DEV_TYPE_FLAG_ARMSOC) {
      /* Find 3D device on ARM SOC. */
      device_num = virgl_egl_find_3d_device(dev_infos, num_devices, GBM_DEV_TYPE_FLAG_ARMSOC);
   }

   if (device_num < 0) {
      /* Try to match GBM device. */
      device_num = virgl_egl_find_matching_device(dev_infos, num_devices, gbm_dev_info.dri_node_num);
   }
   if (device_num < 0)
      return EGL_NO_DEVICE_EXT;

   return devices[device_num];
}

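/* Open an EGL display on the device selected by virgl_egl_get_device(), using
 * the EGL_EXT_platform_device path. Returns false when no suitable device or
 * entry points are available, in which case the caller falls back to the
 * surfaceless/GBM paths in virgl_egl_init(). */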
static bool virgl_egl_get_display(struct virgl_egl *egl)
{
   struct egl_funcs funcs = { 0 };
   EGLDeviceEXT device;

   if (!egl->gbm)
      return false;

   if (!virgl_egl_get_interface(&funcs))
      return false;

   device = virgl_egl_get_device(egl, &funcs);

   if (device == EGL_NO_DEVICE_EXT)
      return false;

   egl->egl_display = funcs.eglGetPlatformDisplay(EGL_PLATFORM_DEVICE_EXT, device, NULL);
   return true;
}
#endif /* ENABLE_MINIGBM_ALLOCATION */

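/* Create the EGL state used for offscreen rendering: pick a display (device,
 * GBM or surfaceless), initialize it, bind the GL or GLES API, choose a
 * config and create the initial context. Returns NULL on failure. */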
struct virgl_egl *virgl_egl_init(struct virgl_gbm *gbm, bool surfaceless, bool gles)
{
   static EGLint conf_att[] = {
      EGL_SURFACE_TYPE, EGL_WINDOW_BIT,
      EGL_RENDERABLE_TYPE, EGL_OPENGL_BIT,
      EGL_RED_SIZE, 1,
      EGL_GREEN_SIZE, 1,
      EGL_BLUE_SIZE, 1,
      EGL_ALPHA_SIZE, 0,
      EGL_NONE,
   };
   static const EGLint ctx_att[] = {
      EGL_CONTEXT_CLIENT_VERSION, 2,
      EGL_NONE
   };
   EGLBoolean success;
   EGLenum api;
   EGLint major, minor, num_configs;
   const char *extensions;
   struct virgl_egl *egl;

   egl = calloc(1, sizeof(struct virgl_egl));
   if (!egl)
      return NULL;

   if (gles)
      conf_att[3] = EGL_OPENGL_ES2_BIT;

   if (surfaceless)
      conf_att[1] = EGL_PBUFFER_BIT;
   else if (!gbm)
      goto fail;

   egl->gbm = gbm;
   egl->different_gpu = false;
   const char *client_extensions = eglQueryString (NULL, EGL_EXTENSIONS);

#ifdef ENABLE_MINIGBM_ALLOCATION
   if (virgl_egl_get_display(egl)) {
      /* Make -Wdangling-else happy. */
   } else /* Fallback to surfaceless. */
#endif
   if (virgl_egl_has_extension_in_string(client_extensions, "EGL_EXT_platform_base")) {
      PFNEGLGETPLATFORMDISPLAYEXTPROC get_platform_display =
         (PFNEGLGETPLATFORMDISPLAYEXTPROC) eglGetProcAddress ("eglGetPlatformDisplayEXT");

      if (!get_platform_display)
         goto fail;

      if (surfaceless) {
         egl->egl_display = get_platform_display (EGL_PLATFORM_SURFACELESS_MESA,
                                                  EGL_DEFAULT_DISPLAY, NULL);
      } else
         egl->egl_display = get_platform_display (EGL_PLATFORM_GBM_KHR,
                                                  (EGLNativeDisplayType)egl->gbm->device, NULL);
   } else {
      egl->egl_display = eglGetDisplay((EGLNativeDisplayType)egl->gbm->device);
   }

   if (!egl->egl_display) {
      /*
       * Don't fall back to the default display if the fd provided by (*get_drm_fd)
       * can't be used.
       */
      if (egl->gbm && egl->gbm->fd < 0)
         goto fail;

      egl->egl_display = eglGetDisplay(EGL_DEFAULT_DISPLAY);
      if (!egl->egl_display)
         goto fail;
   }

   success = eglInitialize(egl->egl_display, &major, &minor);
   if (!success)
      goto fail;

   extensions = eglQueryString(egl->egl_display, EGL_EXTENSIONS);
#ifdef VIRGL_EGL_DEBUG
   vrend_printf( "EGL major/minor: %d.%d\n", major, minor);
   vrend_printf( "EGL version: %s\n",
                 eglQueryString(egl->egl_display, EGL_VERSION));
   vrend_printf( "EGL vendor: %s\n",
                 eglQueryString(egl->egl_display, EGL_VENDOR));
   vrend_printf( "EGL extensions: %s\n", extensions);
#endif

   if (virgl_egl_init_extensions(egl, extensions))
      goto fail;

   if (gles)
      api = EGL_OPENGL_ES_API;
   else
      api = EGL_OPENGL_API;
   success = eglBindAPI(api);
   if (!success)
      goto fail;

   success = eglChooseConfig(egl->egl_display, conf_att, &egl->egl_conf,
                             1, &num_configs);
   if (!success || num_configs != 1)
      goto fail;

   egl->egl_ctx = eglCreateContext(egl->egl_display, egl->egl_conf, EGL_NO_CONTEXT,
                                   ctx_att);
   if (!egl->egl_ctx)
      goto fail;

   eglMakeCurrent(egl->egl_display, EGL_NO_SURFACE, EGL_NO_SURFACE,
                  egl->egl_ctx);

   if (gles && virgl_egl_supports_fences(egl)) {
      egl->signaled_fence = eglCreateSyncKHR(egl->egl_display,
                                             EGL_SYNC_NATIVE_FENCE_ANDROID, NULL);
      if (!egl->signaled_fence) {
         vrend_printf("Failed to create signaled fence");
         goto fail;
      }
   }

   return egl;

 fail:
   free(egl);
   return NULL;
}

void virgl_egl_destroy(struct virgl_egl *egl)
{
   if (egl->signaled_fence) {
      eglDestroySyncKHR(egl->egl_display, egl->signaled_fence);
   }
   eglMakeCurrent(egl->egl_display, EGL_NO_SURFACE, EGL_NO_SURFACE,
                  EGL_NO_CONTEXT);
   eglDestroyContext(egl->egl_display, egl->egl_ctx);
   eglTerminate(egl->egl_display);
   free(egl);
}

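/* Wrap an EGLDisplay that was created and initialized by the embedding
 * application: verify the extensions we need and create our own GBM device
 * handle for buffer export/import. */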
struct virgl_egl *virgl_egl_init_external(EGLDisplay egl_display)
{
   const char *extensions;
   struct virgl_egl *egl;

   egl = calloc(1, sizeof(struct virgl_egl));
   if (!egl)
      return NULL;

   egl->egl_display = egl_display;

   extensions = eglQueryString(egl->egl_display, EGL_EXTENSIONS);
#ifdef VIRGL_EGL_DEBUG
   vrend_printf( "EGL version: %s\n",
                 eglQueryString(egl->egl_display, EGL_VERSION));
   vrend_printf( "EGL vendor: %s\n",
                 eglQueryString(egl->egl_display, EGL_VENDOR));
   vrend_printf( "EGL extensions: %s\n", extensions);
#endif

   if (virgl_egl_init_extensions(egl, extensions)) {
      free(egl);
      return NULL;
   }

   egl->gbm = virgl_gbm_init(-1);

   return egl;
}

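/* Create an additional GL/GLES context of the requested version, optionally
 * sharing objects with the context that is current on this thread. */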
virgl_renderer_gl_context virgl_egl_create_context(struct virgl_egl *egl, struct virgl_gl_ctx_param *vparams)
{
   EGLContext egl_ctx;
   EGLint ctx_att[] = {
      EGL_CONTEXT_CLIENT_VERSION, vparams->major_ver,
      EGL_CONTEXT_MINOR_VERSION_KHR, vparams->minor_ver,
      EGL_NONE
   };
   egl_ctx = eglCreateContext(egl->egl_display,
                              egl->egl_conf,
                              vparams->shared ? eglGetCurrentContext() : EGL_NO_CONTEXT,
                              ctx_att);
   return (virgl_renderer_gl_context)egl_ctx;
}

void virgl_egl_destroy_context(struct virgl_egl *egl, virgl_renderer_gl_context virglctx)
{
   EGLContext egl_ctx = (EGLContext)virglctx;
   eglDestroyContext(egl->egl_display, egl_ctx);
}

int virgl_egl_make_context_current(struct virgl_egl *egl, virgl_renderer_gl_context virglctx)
{
   EGLContext egl_ctx = (EGLContext)virglctx;

   return eglMakeCurrent(egl->egl_display, EGL_NO_SURFACE, EGL_NO_SURFACE,
                         egl_ctx) ? 0 : -1;
}

virgl_renderer_gl_context virgl_egl_get_current_context(UNUSED struct virgl_egl *egl)
{
   EGLContext egl_ctx = eglGetCurrentContext();
   return (virgl_renderer_gl_context)egl_ctx;
}

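/* Determine the DRM fourcc for a GL texture by exporting it as an EGLImage
 * and querying its dma-buf format. Without EGL_MESA_image_dma_buf_export we
 * fall back to converting the virgl format with virgl_gbm_convert_format(). */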
int virgl_egl_get_fourcc_for_texture(struct virgl_egl *egl, uint32_t tex_id, uint32_t format, int *fourcc)
{
   int ret = EINVAL;
   uint32_t gbm_format = 0;

   EGLImageKHR image;
   EGLBoolean success;

   if (!has_bit(egl->extension_bits, EGL_MESA_IMAGE_DMA_BUF_EXPORT)) {
      ret = 0;
      goto fallback;
   }

   image = eglCreateImageKHR(egl->egl_display, eglGetCurrentContext(), EGL_GL_TEXTURE_2D_KHR,
                             (EGLClientBuffer)(uintptr_t)tex_id, NULL);

   if (!image)
      return EINVAL;

   success = eglExportDMABUFImageQueryMESA(egl->egl_display, image, fourcc, NULL, NULL);
   if (!success)
      goto out_destroy;
   ret = 0;
 out_destroy:
   eglDestroyImageKHR(egl->egl_display, image);
   return ret;

 fallback:
   ret = virgl_gbm_convert_format(&format, &gbm_format);
   *fourcc = (int)gbm_format;
   return ret;
}

int virgl_egl_get_fd_for_texture2(struct virgl_egl *egl, uint32_t tex_id, int *fd,
                                  int *stride, int *offset)
{
   int ret = EINVAL;
   EGLImageKHR image = eglCreateImageKHR(egl->egl_display, eglGetCurrentContext(),
                                         EGL_GL_TEXTURE_2D_KHR,
                                         (EGLClientBuffer)(uintptr_t)tex_id, NULL);
   if (!image)
      return EINVAL;
   if (!has_bit(egl->extension_bits, EGL_MESA_IMAGE_DMA_BUF_EXPORT))
      goto out_destroy;

   if (!eglExportDMABUFImageMESA(egl->egl_display, image, fd,
                                 stride, offset))
      goto out_destroy;

   ret = 0;

 out_destroy:
   eglDestroyImageKHR(egl->egl_display, image);
   return ret;
}

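/* Export a GL texture as a dma-buf fd. Prefer EGL_MESA_image_dma_buf_export;
 * with only EGL_MESA_drm_image we export a GEM handle and convert it to an fd
 * through the GBM device. */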
int virgl_egl_get_fd_for_texture(struct virgl_egl *egl, uint32_t tex_id, int *fd)
{
   EGLImageKHR image;
   EGLint stride;
   EGLint offset;
   EGLBoolean success;
   int ret;
   image = eglCreateImageKHR(egl->egl_display, eglGetCurrentContext(), EGL_GL_TEXTURE_2D_KHR,
                             (EGLClientBuffer)(uintptr_t)tex_id, NULL);

   if (!image)
      return EINVAL;

   ret = EINVAL;
   if (has_bit(egl->extension_bits, EGL_MESA_IMAGE_DMA_BUF_EXPORT)) {
      success = eglExportDMABUFImageMESA(egl->egl_display, image, fd, &stride,
                                         &offset);
      if (!success)
         goto out_destroy;
   } else if (has_bit(egl->extension_bits, EGL_MESA_DRM_IMAGE)) {
      EGLint handle;
      success = eglExportDRMImageMESA(egl->egl_display, image, NULL, &handle,
                                      &stride);

      if (!success)
         goto out_destroy;

      if (!egl->gbm)
         goto out_destroy;

      ret = virgl_gbm_export_fd(egl->gbm->device, handle, fd);
      if (ret < 0)
         goto out_destroy;
   } else {
      goto out_destroy;
   }

   ret = 0;
 out_destroy:
   eglDestroyImageKHR(egl->egl_display, image);
   return ret;
}

bool virgl_has_egl_khr_gl_colorspace(struct virgl_egl *egl)
{
   return has_bit(egl->extension_bits, EGL_KHR_GL_COLORSPACE);
}

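/* Build the attribute list for EGL_LINUX_DMA_BUF_EXT and import the dma-buf
 * planes as an EGLImage. Plane fd/pitch/offset attributes are emitted for up
 * to VIRGL_GBM_MAX_PLANES planes; per-plane modifier attributes are added
 * only when EGL_EXT_image_dma_buf_import_modifiers is available and the
 * modifier is valid. */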
void *virgl_egl_image_from_dmabuf(struct virgl_egl *egl,
                                  uint32_t width,
                                  uint32_t height,
                                  uint32_t drm_format,
                                  uint64_t drm_modifier,
                                  uint32_t plane_count,
                                  const int *plane_fds,
                                  const uint32_t *plane_strides,
                                  const uint32_t *plane_offsets)
{
   EGLint attrs[6 + VIRGL_GBM_MAX_PLANES * 10 + 1];
   uint32_t count;

   assert(VIRGL_GBM_MAX_PLANES <= 4);
   assert(plane_count && plane_count <= VIRGL_GBM_MAX_PLANES);

   count = 0;
   attrs[count++] = EGL_WIDTH;
   attrs[count++] = width;
   attrs[count++] = EGL_HEIGHT;
   attrs[count++] = height;
   attrs[count++] = EGL_LINUX_DRM_FOURCC_EXT;
   attrs[count++] = drm_format;
   for (uint32_t i = 0; i < plane_count; i++) {
      if (i < 3) {
         attrs[count++] = EGL_DMA_BUF_PLANE0_FD_EXT + i * 3;
         attrs[count++] = plane_fds[i];
         attrs[count++] = EGL_DMA_BUF_PLANE0_PITCH_EXT + i * 3;
         attrs[count++] = plane_strides[i];
         attrs[count++] = EGL_DMA_BUF_PLANE0_OFFSET_EXT + i * 3;
         attrs[count++] = plane_offsets[i];
      }

      if (has_bit(egl->extension_bits, EGL_EXT_IMAGE_DMA_BUF_IMPORT_MODIFIERS)) {
         if (i == 3) {
            attrs[count++] = EGL_DMA_BUF_PLANE3_FD_EXT;
            attrs[count++] = plane_fds[i];
            attrs[count++] = EGL_DMA_BUF_PLANE3_PITCH_EXT;
            attrs[count++] = plane_strides[i];
            attrs[count++] = EGL_DMA_BUF_PLANE3_OFFSET_EXT;
            attrs[count++] = plane_offsets[i];
         }

         if (drm_modifier != DRM_FORMAT_MOD_INVALID) {
            attrs[count++] = EGL_DMA_BUF_PLANE0_MODIFIER_LO_EXT + i * 2;
            attrs[count++] = (uint32_t)drm_modifier;
            attrs[count++] = EGL_DMA_BUF_PLANE0_MODIFIER_HI_EXT + i * 2;
            attrs[count++] = (uint32_t)(drm_modifier >> 32);
         }
      }
   }
   attrs[count++] = EGL_NONE;
   assert(count <= ARRAY_SIZE(attrs));

   return (void *)eglCreateImageKHR(egl->egl_display,
                                    EGL_NO_CONTEXT,
                                    EGL_LINUX_DMA_BUF_EXT,
                                    (EGLClientBuffer)NULL,
                                    attrs);
}

void virgl_egl_image_destroy(struct virgl_egl *egl, void *image)
{
   eglDestroyImageKHR(egl->egl_display, image);
}

#ifdef ENABLE_MINIGBM_ALLOCATION
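/* Export every plane of a GBM buffer object as a prime fd and wrap them into
 * a single EGLImage via virgl_egl_image_from_dmabuf(). The fds are closed
 * again before returning. */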
void *virgl_egl_image_from_gbm_bo(struct virgl_egl *egl, struct gbm_bo *bo)
{
   int ret;
   void *image = NULL;
   int fds[VIRGL_GBM_MAX_PLANES] = {-1, -1, -1, -1};
   uint32_t strides[VIRGL_GBM_MAX_PLANES];
   uint32_t offsets[VIRGL_GBM_MAX_PLANES];
   int num_planes = gbm_bo_get_plane_count(bo);

   if (num_planes < 0 || num_planes > VIRGL_GBM_MAX_PLANES)
      return NULL;

   for (int plane = 0; plane < num_planes; plane++) {
      uint32_t handle = gbm_bo_get_handle_for_plane(bo, plane).u32;
      ret = virgl_gbm_export_fd(egl->gbm->device, handle, &fds[plane]);
      if (ret < 0) {
         vrend_printf( "failed to export plane handle\n");
         goto out_close;
      }

      strides[plane] = gbm_bo_get_stride_for_plane(bo, plane);
      offsets[plane] = gbm_bo_get_offset(bo, plane);
   }

   image = virgl_egl_image_from_dmabuf(egl,
                                       gbm_bo_get_width(bo),
                                       gbm_bo_get_height(bo),
                                       gbm_bo_get_format(bo),
                                       gbm_bo_get_modifier(bo),
                                       num_planes,
                                       fds,
                                       strides,
                                       offsets);

 out_close:
   for (int plane = 0; plane < num_planes; plane++)
      close(fds[plane]);

   return image;
}

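/* Wrap a single auxiliary plane of a GBM buffer object (e.g. the chroma plane
 * of an NV12 buffer) as its own EGLImage, using GBM_FORMAT_R8 or
 * GBM_FORMAT_GR88 depending on the plane's bytes per pixel. */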
void *virgl_egl_aux_plane_image_from_gbm_bo(struct virgl_egl *egl, struct gbm_bo *bo, int plane)
{
   int ret;
   void *image = NULL;
   int fd = -1;

   int bytes_per_pixel = virgl_gbm_get_plane_bytes_per_pixel(bo, plane);
   if (bytes_per_pixel != 1 && bytes_per_pixel != 2)
      return NULL;

   uint32_t handle = gbm_bo_get_handle_for_plane(bo, plane).u32;
   ret = drmPrimeHandleToFD(gbm_device_get_fd(egl->gbm->device), handle, DRM_CLOEXEC, &fd);
   if (ret < 0) {
      vrend_printf("failed to export plane handle %d\n", errno);
      return NULL;
   }

   const uint32_t format = bytes_per_pixel == 1 ? GBM_FORMAT_R8 : GBM_FORMAT_GR88;
   const uint32_t stride = gbm_bo_get_stride_for_plane(bo, plane);
   const uint32_t offset = gbm_bo_get_offset(bo, plane);
   image = virgl_egl_image_from_dmabuf(egl,
                                       virgl_gbm_get_plane_width(bo, plane),
                                       virgl_gbm_get_plane_height(bo, plane),
                                       format,
                                       gbm_bo_get_modifier(bo),
                                       1,
                                       &fd,
                                       &stride,
                                       &offset);
   close(fd);

   return image;
}
#endif /* ENABLE_MINIGBM_ALLOCATION */

bool virgl_egl_supports_fences(struct virgl_egl *egl)
{
   return (egl && has_bit(egl->extension_bits, EGL_KHR_FENCE_SYNC_ANDROID));
}

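/* Create a native Android fence sync object. Callers typically create the
 * fence after submitting GL work, export its fd with virgl_egl_export_fence()
 * and wait on it with virgl_egl_client_wait_fence(). Returns EGL_NO_SYNC_KHR
 * when EGL_ANDROID_native_fence_sync is unavailable. */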
EGLSyncKHR virgl_egl_fence_create(struct virgl_egl *egl)
{
   if (!egl || !has_bit(egl->extension_bits, EGL_KHR_FENCE_SYNC_ANDROID)) {
      return EGL_NO_SYNC_KHR;
   }

   return eglCreateSyncKHR(egl->egl_display, EGL_SYNC_NATIVE_FENCE_ANDROID, NULL);
}

void virgl_egl_fence_destroy(struct virgl_egl *egl, EGLSyncKHR fence) {
   eglDestroySyncKHR(egl->egl_display, fence);
}

bool virgl_egl_client_wait_fence(struct virgl_egl *egl, EGLSyncKHR fence, bool blocking)
{
   /* attempt to poll the native fence fd instead of eglClientWaitSyncKHR() to
    * avoid Mesa's eglapi global-display-lock synchronizing vrend's sync_thread.
    */
   int fd = -1;
   if (!virgl_egl_export_fence(egl, fence, &fd)) {
      EGLint egl_result = eglClientWaitSyncKHR(egl->egl_display, fence, 0,
                                               blocking ? EGL_FOREVER_KHR : 0);
      if (egl_result == EGL_FALSE)
         vrend_printf("wait sync failed\n");
      return egl_result != EGL_TIMEOUT_EXPIRED_KHR;
   }
   assert(fd >= 0);

   int ret;
   struct pollfd pfd = {
      .fd = fd,
      .events = POLLIN,
   };
   do {
      ret = poll(&pfd, 1, blocking ? -1 : 0);
      if (ret > 0 && (pfd.revents & (POLLERR | POLLNVAL))) {
         ret = -1;
         break;
      }
   } while (ret == -1 && (errno == EINTR || errno == EAGAIN));
   close(fd);

   if (ret < 0)
      vrend_printf("wait sync failed\n");
   return ret != 0;
}

bool virgl_egl_export_signaled_fence(struct virgl_egl *egl, int *out_fd) {
   return virgl_egl_export_fence(egl, egl->signaled_fence, out_fd);
}

bool virgl_egl_export_fence(struct virgl_egl *egl, EGLSyncKHR fence, int *out_fd) {
   *out_fd = eglDupNativeFenceFDANDROID(egl->egl_display, fence);
   return *out_fd != EGL_NO_NATIVE_FENCE_FD_ANDROID;
}

bool virgl_egl_different_gpu(struct virgl_egl *egl)
{
   return egl->different_gpu;
}

const char *virgl_egl_error_string(EGLint error)
{
   switch (error) {
#define CASE_STR( value ) case value: return #value;
   CASE_STR( EGL_SUCCESS )
   CASE_STR( EGL_NOT_INITIALIZED )
   CASE_STR( EGL_BAD_ACCESS )
   CASE_STR( EGL_BAD_ALLOC )
   CASE_STR( EGL_BAD_ATTRIBUTE )
   CASE_STR( EGL_BAD_CONTEXT )
   CASE_STR( EGL_BAD_CONFIG )
   CASE_STR( EGL_BAD_CURRENT_SURFACE )
   CASE_STR( EGL_BAD_DISPLAY )
   CASE_STR( EGL_BAD_SURFACE )
   CASE_STR( EGL_BAD_MATCH )
   CASE_STR( EGL_BAD_PARAMETER )
   CASE_STR( EGL_BAD_NATIVE_PIXMAP )
   CASE_STR( EGL_BAD_NATIVE_WINDOW )
   CASE_STR( EGL_CONTEXT_LOST )
#undef CASE_STR
   default: return "Unknown error";
   }
}