/**************************************************************************
 *
 * Copyright (C) 2014 Red Hat Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included
 * in all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 **************************************************************************/
/* create our own EGL offscreen rendering context via gbm and rendernodes */


/* if we are using EGL and rendernodes then we talk via file descriptors to the remote
   node */
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif

#define EGL_EGLEXT_PROTOTYPES
#include <assert.h>
#include <errno.h>
#include <fcntl.h>
#include <stdbool.h>
#include <stdlib.h>
#include <string.h>
#include <unistd.h>
#include <xf86drm.h>

#include "util/u_memory.h"

#include "virglrenderer.h"
#include "vrend_winsys.h"
#include "vrend_winsys_egl.h"
#include "virgl_hw.h"
#include "vrend_winsys_gbm.h"
#include "virgl_util.h"

#define EGL_KHR_SURFACELESS_CONTEXT BIT(0)
#define EGL_KHR_CREATE_CONTEXT BIT(1)
#define EGL_MESA_DRM_IMAGE BIT(2)
#define EGL_MESA_IMAGE_DMA_BUF_EXPORT BIT(3)
#define EGL_MESA_DMA_BUF_IMAGE_IMPORT BIT(4)
#define EGL_KHR_GL_COLORSPACE BIT(5)
#define EGL_EXT_IMAGE_DMA_BUF_IMPORT BIT(6)
#define EGL_EXT_IMAGE_DMA_BUF_IMPORT_MODIFIERS BIT(7)
#define EGL_KHR_FENCE_SYNC_ANDROID BIT(8)

static const struct {
   uint32_t bit;
   const char *string;
} extensions_list[] = {
   { EGL_KHR_SURFACELESS_CONTEXT, "EGL_KHR_surfaceless_context" },
   { EGL_KHR_CREATE_CONTEXT, "EGL_KHR_create_context" },
   { EGL_MESA_DRM_IMAGE, "EGL_MESA_drm_image" },
   { EGL_MESA_IMAGE_DMA_BUF_EXPORT, "EGL_MESA_image_dma_buf_export" },
   { EGL_KHR_GL_COLORSPACE, "EGL_KHR_gl_colorspace" },
   { EGL_EXT_IMAGE_DMA_BUF_IMPORT, "EGL_EXT_image_dma_buf_import" },
   { EGL_EXT_IMAGE_DMA_BUF_IMPORT_MODIFIERS, "EGL_EXT_image_dma_buf_import_modifiers" },
   { EGL_KHR_FENCE_SYNC_ANDROID, "EGL_ANDROID_native_fence_sync" }
};

struct virgl_egl {
   struct virgl_gbm *gbm;
   EGLDisplay egl_display;
   EGLConfig egl_conf;
   EGLContext egl_ctx;
   uint32_t extension_bits;
   EGLSyncKHR signaled_fence;
   bool different_gpu;
};

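/* Return true only when the whole extension token "needle" appears in the
 * space-separated extension string "haystack"; prefix-only matches are
 * skipped so e.g. "EGL_KHR_fence" does not match "EGL_KHR_fence_sync". */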
static bool virgl_egl_has_extension_in_string(const char *haystack, const char *needle)
{
   const unsigned needle_len = strlen(needle);

   if (!haystack)
      return false;

   if (needle_len == 0)
      return false;

   while (true) {
      const char *const s = strstr(haystack, needle);

      if (s == NULL)
         return false;

      if (s[needle_len] == ' ' || s[needle_len] == '\0') {
         return true;
      }

      /* strstr found an extension whose name begins with
       * needle, but whose name is not equal to needle.
       * Restart the search at s + needle_len so that we
       * don't just find the same extension again and go
       * into an infinite loop.
       */
      haystack = s + needle_len;
   }

   return false;
}

static int virgl_egl_init_extensions(struct virgl_egl *egl, const char *extensions)
{
   for (uint32_t i = 0; i < ARRAY_SIZE(extensions_list); i++) {
      if (virgl_egl_has_extension_in_string(extensions, extensions_list[i].string))
         egl->extension_bits |= extensions_list[i].bit;
   }

   if (!has_bits(egl->extension_bits, EGL_KHR_SURFACELESS_CONTEXT | EGL_KHR_CREATE_CONTEXT)) {
      vrend_printf( "Missing EGL_KHR_surfaceless_context or EGL_KHR_create_context\n");
      return -1;
   }

   return 0;
}

#ifdef ENABLE_MINIGBM_ALLOCATION

struct egl_funcs {
   PFNEGLGETPLATFORMDISPLAYEXTPROC eglGetPlatformDisplay;
   PFNEGLQUERYDEVICESEXTPROC eglQueryDevices;
   PFNEGLQUERYDEVICESTRINGEXTPROC eglQueryDeviceString;
};

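/* Resolve the EGL_EXT_platform_device entry points needed to enumerate EGL
 * devices; returns false if any required client extension or function
 * pointer is missing. */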
static bool virgl_egl_get_interface(struct egl_funcs *funcs)
{
   const char *client_extensions = eglQueryString (NULL, EGL_EXTENSIONS);

   assert(funcs);

   if (virgl_egl_has_extension_in_string(client_extensions, "EGL_KHR_platform_base")) {
      funcs->eglGetPlatformDisplay =
         (PFNEGLGETPLATFORMDISPLAYEXTPROC) eglGetProcAddress ("eglGetPlatformDisplay");
   } else if (virgl_egl_has_extension_in_string(client_extensions, "EGL_EXT_platform_base")) {
      funcs->eglGetPlatformDisplay =
         (PFNEGLGETPLATFORMDISPLAYEXTPROC) eglGetProcAddress ("eglGetPlatformDisplayEXT");
   }

   if (!funcs->eglGetPlatformDisplay)
      return false;

   if (!virgl_egl_has_extension_in_string(client_extensions, "EGL_EXT_platform_device"))
      return false;

   if (!virgl_egl_has_extension_in_string(client_extensions, "EGL_EXT_device_enumeration"))
      return false;

   funcs->eglQueryDevices = (PFNEGLQUERYDEVICESEXTPROC)eglGetProcAddress ("eglQueryDevicesEXT");
   if (!funcs->eglQueryDevices)
      return false;

   if (!virgl_egl_has_extension_in_string(client_extensions, "EGL_EXT_device_query"))
      return false;

   funcs->eglQueryDeviceString = (PFNEGLQUERYDEVICESTRINGEXTPROC)eglGetProcAddress("eglQueryDeviceStringEXT");
   if (!funcs->eglQueryDeviceString)
      return false;

   return true;
}

static EGLint virgl_egl_find_3d_device(struct gbm_device_info *dev_infos, EGLint num_devices, uint32_t flags)
{
   EGLint d;

   for (d = 0; d < num_devices; d++) {
      if ((dev_infos[d].dev_type_flags & flags) == flags
          && dev_infos[d].dev_type_flags & GBM_DEV_TYPE_FLAG_3D)
         return d;
   }

   return -1;
}

static EGLint virgl_egl_find_matching_device(struct gbm_device_info *dev_infos, EGLint num_devices, int dri_node_num)
{
   EGLint d;

   for (d = 0; d < num_devices; d++) {
      if (dev_infos[d].dri_node_num == dri_node_num)
         return d;
   }

   return -1;
}

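/* Pick the EGL device to render on: prefer a discrete GPU when
 * VIRGL_PREFER_DGPU is set, fall back to a 3D-capable device on ARM SoCs,
 * and otherwise match the DRI node of the GBM device we were given. */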
static EGLDeviceEXT virgl_egl_get_device(struct virgl_egl *egl, struct egl_funcs *funcs)
{
   EGLint num_devices = 0;
   EGLint max_devices = 64;
   EGLDeviceEXT devices[64];
   struct gbm_device_info dev_infos[64];
   struct gbm_device_info gbm_dev_info;
   EGLint device_num = -1;
   EGLint d;

   if (gbm_detect_device_info(0, gbm_device_get_fd(egl->gbm->device), &gbm_dev_info) < 0)
      return EGL_NO_DEVICE_EXT;

   if (!funcs->eglQueryDevices(max_devices, devices, &num_devices))
      return EGL_NO_DEVICE_EXT;

   /* We query EGL_DRM_DEVICE_FILE_EXT without checking the EGL_EXT_device_drm
    * extension; we simply get NULL when it is not available. Otherwise we would
    * have to query it after initializing the display for every device.
    */
   for (d = 0; d < num_devices; d++) {
      const char *dev_node = funcs->eglQueryDeviceString(devices[d], EGL_DRM_DEVICE_FILE_EXT);
      memset(&dev_infos[d], 0, sizeof(dev_infos[d]));
      if (dev_node) {
         if (gbm_detect_device_info_path(0, dev_node, dev_infos + d) < 0)
            return EGL_NO_DEVICE_EXT;
      } else {
         dev_infos[d].dri_node_num = -1;
      }
   }

   if (getenv("VIRGL_PREFER_DGPU"))
      /* Find a discrete GPU. */
      device_num = virgl_egl_find_3d_device(dev_infos, num_devices, GBM_DEV_TYPE_FLAG_DISCRETE);

   if (device_num >= 0) {
      egl->different_gpu = dev_infos[device_num].dri_node_num != gbm_dev_info.dri_node_num;
   } else if (gbm_dev_info.dev_type_flags & GBM_DEV_TYPE_FLAG_ARMSOC) {
      /* Find the 3D device on an ARM SoC. */
      device_num = virgl_egl_find_3d_device(dev_infos, num_devices, GBM_DEV_TYPE_FLAG_ARMSOC);
   }

   if (device_num < 0) {
      /* Try to match the GBM device. */
      device_num = virgl_egl_find_matching_device(dev_infos, num_devices, gbm_dev_info.dri_node_num);
   }
   if (device_num < 0)
      return EGL_NO_DEVICE_EXT;

   return devices[device_num];
}

static bool virgl_egl_get_display(struct virgl_egl *egl)
{
   struct egl_funcs funcs = { 0 };
   EGLDeviceEXT device;

   if (!egl->gbm)
      return false;

   if (!virgl_egl_get_interface(&funcs))
      return false;

   device = virgl_egl_get_device(egl, &funcs);

   if (device == EGL_NO_DEVICE_EXT)
      return false;

   egl->egl_display = funcs.eglGetPlatformDisplay(EGL_PLATFORM_DEVICE_EXT, device, NULL);
   return true;
}
#endif /* ENABLE_MINIGBM_ALLOCATION */

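/* Create the shared offscreen EGL setup: pick a display (EGL device, GBM
 * device or surfaceless platform), initialize EGL, bind the requested API
 * and create the context that all virgl contexts share resources with. */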
struct virgl_egl *virgl_egl_init(struct virgl_gbm *gbm, bool surfaceless, bool gles)
{
   static EGLint conf_att[] = {
      EGL_SURFACE_TYPE, EGL_WINDOW_BIT,
      EGL_RENDERABLE_TYPE, EGL_OPENGL_BIT,
      EGL_RED_SIZE, 1,
      EGL_GREEN_SIZE, 1,
      EGL_BLUE_SIZE, 1,
      EGL_ALPHA_SIZE, 0,
      EGL_NONE,
   };
   static const EGLint ctx_att[] = {
      EGL_CONTEXT_CLIENT_VERSION, 2,
      EGL_NONE
   };
   EGLBoolean success;
   EGLenum api;
   EGLint major, minor, num_configs;
   const char *extensions;
   struct virgl_egl *egl;

   egl = calloc(1, sizeof(struct virgl_egl));
   if (!egl)
      return NULL;

   if (gles)
      conf_att[3] = EGL_OPENGL_ES2_BIT;

   if (surfaceless)
      conf_att[1] = EGL_PBUFFER_BIT;
   else if (!gbm)
      goto fail;

   egl->gbm = gbm;
   egl->different_gpu = false;
   const char *client_extensions = eglQueryString (NULL, EGL_EXTENSIONS);

#ifdef ENABLE_MINIGBM_ALLOCATION
   if (virgl_egl_get_display(egl)) {
      /* Make -Wdangling-else happy. */
   } else /* Fall back to surfaceless. */
#endif
   if (virgl_egl_has_extension_in_string(client_extensions, "EGL_KHR_platform_base")) {
      PFNEGLGETPLATFORMDISPLAYEXTPROC get_platform_display =
         (PFNEGLGETPLATFORMDISPLAYEXTPROC) eglGetProcAddress ("eglGetPlatformDisplay");

      if (!get_platform_display)
         goto fail;

      if (surfaceless) {
         egl->egl_display = get_platform_display (EGL_PLATFORM_SURFACELESS_MESA,
                                                  EGL_DEFAULT_DISPLAY, NULL);
      } else
         egl->egl_display = get_platform_display (EGL_PLATFORM_GBM_KHR,
                                                  (EGLNativeDisplayType)egl->gbm->device, NULL);
   } else if (virgl_egl_has_extension_in_string(client_extensions, "EGL_EXT_platform_base")) {
      PFNEGLGETPLATFORMDISPLAYEXTPROC get_platform_display =
         (PFNEGLGETPLATFORMDISPLAYEXTPROC) eglGetProcAddress ("eglGetPlatformDisplayEXT");

      if (!get_platform_display)
         goto fail;

      if (surfaceless) {
         egl->egl_display = get_platform_display (EGL_PLATFORM_SURFACELESS_MESA,
                                                  EGL_DEFAULT_DISPLAY, NULL);
      } else
         egl->egl_display = get_platform_display (EGL_PLATFORM_GBM_KHR,
                                                  (EGLNativeDisplayType)egl->gbm->device, NULL);
   } else {
      egl->egl_display = eglGetDisplay((EGLNativeDisplayType)egl->gbm->device);
   }

   if (!egl->egl_display) {
      /*
       * Don't fall back to the default display if the fd provided by (*get_drm_fd)
       * can't be used.
       */
      if (egl->gbm && egl->gbm->fd < 0)
         goto fail;

      egl->egl_display = eglGetDisplay(EGL_DEFAULT_DISPLAY);
      if (!egl->egl_display)
         goto fail;
   }

   success = eglInitialize(egl->egl_display, &major, &minor);
   if (!success)
      goto fail;

   extensions = eglQueryString(egl->egl_display, EGL_EXTENSIONS);
#ifdef VIRGL_EGL_DEBUG
   vrend_printf( "EGL major/minor: %d.%d\n", major, minor);
   vrend_printf( "EGL version: %s\n",
                 eglQueryString(egl->egl_display, EGL_VERSION));
   vrend_printf( "EGL vendor: %s\n",
                 eglQueryString(egl->egl_display, EGL_VENDOR));
   vrend_printf( "EGL extensions: %s\n", extensions);
#endif

   if (virgl_egl_init_extensions(egl, extensions))
      goto fail;

   if (gles)
      api = EGL_OPENGL_ES_API;
   else
      api = EGL_OPENGL_API;
   success = eglBindAPI(api);
   if (!success)
      goto fail;

   success = eglChooseConfig(egl->egl_display, conf_att, &egl->egl_conf,
                             1, &num_configs);
   if (!success || num_configs != 1)
      goto fail;

   egl->egl_ctx = eglCreateContext(egl->egl_display, egl->egl_conf, EGL_NO_CONTEXT,
                                   ctx_att);
   if (!egl->egl_ctx)
      goto fail;

   eglMakeCurrent(egl->egl_display, EGL_NO_SURFACE, EGL_NO_SURFACE,
                  egl->egl_ctx);

   if (gles && virgl_egl_supports_fences(egl)) {
      egl->signaled_fence = eglCreateSyncKHR(egl->egl_display,
                                             EGL_SYNC_NATIVE_FENCE_ANDROID, NULL);
      if (!egl->signaled_fence) {
         vrend_printf("Failed to create signaled fence\n");
         goto fail;
      }
   }

   return egl;

fail:
   free(egl);
   return NULL;
}

void virgl_egl_destroy(struct virgl_egl *egl)
{
   if (egl->signaled_fence) {
      eglDestroySyncKHR(egl->egl_display, egl->signaled_fence);
   }
   eglMakeCurrent(egl->egl_display, EGL_NO_SURFACE, EGL_NO_SURFACE,
                  EGL_NO_CONTEXT);
   eglDestroyContext(egl->egl_display, egl->egl_ctx);
   eglTerminate(egl->egl_display);
   free(egl);
}

virgl_renderer_gl_context virgl_egl_create_context(struct virgl_egl *egl, struct virgl_gl_ctx_param *vparams)
{
   EGLContext egl_ctx;
   EGLint ctx_att[] = {
      EGL_CONTEXT_CLIENT_VERSION, vparams->major_ver,
      EGL_CONTEXT_MINOR_VERSION_KHR, vparams->minor_ver,
      EGL_NONE
   };
   egl_ctx = eglCreateContext(egl->egl_display,
                              egl->egl_conf,
                              vparams->shared ? eglGetCurrentContext() : EGL_NO_CONTEXT,
                              ctx_att);
   return (virgl_renderer_gl_context)egl_ctx;
}

void virgl_egl_destroy_context(struct virgl_egl *egl, virgl_renderer_gl_context virglctx)
{
   EGLContext egl_ctx = (EGLContext)virglctx;
   eglDestroyContext(egl->egl_display, egl_ctx);
}

int virgl_egl_make_context_current(struct virgl_egl *egl, virgl_renderer_gl_context virglctx)
{
   EGLContext egl_ctx = (EGLContext)virglctx;

   return eglMakeCurrent(egl->egl_display, EGL_NO_SURFACE, EGL_NO_SURFACE,
                         egl_ctx);
}

virgl_renderer_gl_context virgl_egl_get_current_context(UNUSED struct virgl_egl *egl)
{
   EGLContext egl_ctx = eglGetCurrentContext();
   return (virgl_renderer_gl_context)egl_ctx;
}

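/* Determine the DRM fourcc for a GL texture by wrapping it in an EGLImage and
 * querying it via EGL_MESA_image_dma_buf_export; without that extension the
 * fourcc is derived from the virgl format with the GBM format helpers. */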
int virgl_egl_get_fourcc_for_texture(struct virgl_egl *egl, uint32_t tex_id, uint32_t format, int *fourcc)
{
   int ret = EINVAL;
   uint32_t gbm_format = 0;

   EGLImageKHR image;
   EGLBoolean success;

   if (!has_bit(egl->extension_bits, EGL_MESA_IMAGE_DMA_BUF_EXPORT)) {
      ret = 0;
      goto fallback;
   }

   image = eglCreateImageKHR(egl->egl_display, eglGetCurrentContext(), EGL_GL_TEXTURE_2D_KHR,
                             (EGLClientBuffer)(unsigned long)tex_id, NULL);

   if (!image)
      return EINVAL;

   success = eglExportDMABUFImageQueryMESA(egl->egl_display, image, fourcc, NULL, NULL);
   if (!success)
      goto out_destroy;
   ret = 0;
 out_destroy:
   eglDestroyImageKHR(egl->egl_display, image);
   return ret;

 fallback:
   ret = virgl_gbm_convert_format(&format, &gbm_format);
   *fourcc = (int)gbm_format;
   return ret;
}

int virgl_egl_get_fd_for_texture2(struct virgl_egl *egl, uint32_t tex_id, int *fd,
                                  int *stride, int *offset)
{
   int ret = EINVAL;
   EGLImageKHR image = eglCreateImageKHR(egl->egl_display, eglGetCurrentContext(),
                                         EGL_GL_TEXTURE_2D_KHR,
                                         (EGLClientBuffer)(unsigned long)tex_id, NULL);
   if (!image)
      return EINVAL;
   if (!has_bit(egl->extension_bits, EGL_MESA_IMAGE_DMA_BUF_EXPORT))
      goto out_destroy;

   if (!eglExportDMABUFImageMESA(egl->egl_display, image, fd,
                                 stride, offset))
      goto out_destroy;

   ret = 0;

out_destroy:
   eglDestroyImageKHR(egl->egl_display, image);
   return ret;
}

int virgl_egl_get_fd_for_texture(struct virgl_egl *egl, uint32_t tex_id, int *fd)
{
   EGLImageKHR image;
   EGLint stride;
   EGLint offset;
   EGLBoolean success;
   int ret;
   image = eglCreateImageKHR(egl->egl_display, eglGetCurrentContext(), EGL_GL_TEXTURE_2D_KHR,
                             (EGLClientBuffer)(unsigned long)tex_id, NULL);

   if (!image)
      return EINVAL;

   ret = EINVAL;
   if (has_bit(egl->extension_bits, EGL_MESA_IMAGE_DMA_BUF_EXPORT)) {
      success = eglExportDMABUFImageMESA(egl->egl_display, image, fd, &stride,
                                         &offset);
      if (!success)
         goto out_destroy;
   } else if (has_bit(egl->extension_bits, EGL_MESA_DRM_IMAGE)) {
      EGLint handle;
      success = eglExportDRMImageMESA(egl->egl_display, image, NULL, &handle,
                                      &stride);

      if (!success)
         goto out_destroy;

      if (!egl->gbm)
         goto out_destroy;

      ret = virgl_gbm_export_fd(egl->gbm->device, handle, fd);
      if (ret < 0)
         goto out_destroy;
   } else {
      goto out_destroy;
   }

   ret = 0;
 out_destroy:
   eglDestroyImageKHR(egl->egl_display, image);
   return ret;
}

bool virgl_has_egl_khr_gl_colorspace(struct virgl_egl *egl)
{
   return has_bit(egl->extension_bits, EGL_KHR_GL_COLORSPACE);
}

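/* Import a dma-buf as an EGLImage. The attribute array carries width, height
 * and fourcc plus a fd/pitch/offset triple per plane, and per-plane modifier
 * lo/hi values when EGL_EXT_image_dma_buf_import_modifiers is available. */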
void *virgl_egl_image_from_dmabuf(struct virgl_egl *egl,
                                  uint32_t width,
                                  uint32_t height,
                                  uint32_t drm_format,
                                  uint64_t drm_modifier,
                                  uint32_t plane_count,
                                  const int *plane_fds,
                                  const uint32_t *plane_strides,
                                  const uint32_t *plane_offsets)
{
   EGLint attrs[6 + VIRGL_GBM_MAX_PLANES * 10 + 1];
   uint32_t count;

   assert(VIRGL_GBM_MAX_PLANES <= 4);
   assert(plane_count && plane_count <= VIRGL_GBM_MAX_PLANES);

   count = 0;
   attrs[count++] = EGL_WIDTH;
   attrs[count++] = width;
   attrs[count++] = EGL_HEIGHT;
   attrs[count++] = height;
   attrs[count++] = EGL_LINUX_DRM_FOURCC_EXT;
   attrs[count++] = drm_format;
   for (uint32_t i = 0; i < plane_count; i++) {
      if (i < 3) {
         attrs[count++] = EGL_DMA_BUF_PLANE0_FD_EXT + i * 3;
         attrs[count++] = plane_fds[i];
         attrs[count++] = EGL_DMA_BUF_PLANE0_PITCH_EXT + i * 3;
         attrs[count++] = plane_strides[i];
         attrs[count++] = EGL_DMA_BUF_PLANE0_OFFSET_EXT + i * 3;
         attrs[count++] = plane_offsets[i];
      }

      if (has_bit(egl->extension_bits, EGL_EXT_IMAGE_DMA_BUF_IMPORT_MODIFIERS)) {
         if (i == 3) {
            attrs[count++] = EGL_DMA_BUF_PLANE3_FD_EXT;
            attrs[count++] = plane_fds[i];
            attrs[count++] = EGL_DMA_BUF_PLANE3_PITCH_EXT;
            attrs[count++] = plane_strides[i];
            attrs[count++] = EGL_DMA_BUF_PLANE3_OFFSET_EXT;
            attrs[count++] = plane_offsets[i];
         }

         if (drm_modifier != DRM_FORMAT_MOD_INVALID) {
            attrs[count++] = EGL_DMA_BUF_PLANE0_MODIFIER_LO_EXT + i * 2;
            attrs[count++] = (uint32_t)drm_modifier;
            attrs[count++] = EGL_DMA_BUF_PLANE0_MODIFIER_HI_EXT + i * 2;
            attrs[count++] = (uint32_t)(drm_modifier >> 32);
         }
      }
   }
   attrs[count++] = EGL_NONE;
   assert(count <= ARRAY_SIZE(attrs));

   return (void *)eglCreateImageKHR(egl->egl_display,
                                    EGL_NO_CONTEXT,
                                    EGL_LINUX_DMA_BUF_EXT,
                                    (EGLClientBuffer)NULL,
                                    attrs);
}

void virgl_egl_image_destroy(struct virgl_egl *egl, void *image)
{
   eglDestroyImageKHR(egl->egl_display, image);
}

#ifdef ENABLE_MINIGBM_ALLOCATION
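/* Export each plane of a GBM buffer object as a dma-buf fd and import the
 * whole buffer as a single EGLImage; the fds are closed before returning. */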
void *virgl_egl_image_from_gbm_bo(struct virgl_egl *egl, struct gbm_bo *bo)
{
   int ret;
   void *image = NULL;
   int fds[VIRGL_GBM_MAX_PLANES] = {-1, -1, -1, -1};
   uint32_t strides[VIRGL_GBM_MAX_PLANES];
   uint32_t offsets[VIRGL_GBM_MAX_PLANES];
   int num_planes = gbm_bo_get_plane_count(bo);

   if (num_planes < 0 || num_planes > VIRGL_GBM_MAX_PLANES)
      return NULL;

   for (int plane = 0; plane < num_planes; plane++) {
      uint32_t handle = gbm_bo_get_handle_for_plane(bo, plane).u32;
      ret = virgl_gbm_export_fd(egl->gbm->device, handle, &fds[plane]);
      if (ret < 0) {
         vrend_printf( "failed to export plane handle\n");
         goto out_close;
      }

      strides[plane] = gbm_bo_get_stride_for_plane(bo, plane);
      offsets[plane] = gbm_bo_get_offset(bo, plane);
   }

   image = virgl_egl_image_from_dmabuf(egl,
                                       gbm_bo_get_width(bo),
                                       gbm_bo_get_height(bo),
                                       gbm_bo_get_format(bo),
                                       gbm_bo_get_modifier(bo),
                                       num_planes,
                                       fds,
                                       strides,
                                       offsets);

out_close:
   for (int plane = 0; plane < num_planes; plane++)
      close(fds[plane]);

   return image;
}

void *virgl_egl_aux_plane_image_from_gbm_bo(struct virgl_egl *egl, struct gbm_bo *bo, int plane)
{
   int ret;
   void *image = NULL;
   int fd = -1;

   int bytes_per_pixel = virgl_gbm_get_plane_bytes_per_pixel(bo, plane);
   if (bytes_per_pixel != 1 && bytes_per_pixel != 2)
      return NULL;

   uint32_t handle = gbm_bo_get_handle_for_plane(bo, plane).u32;
   ret = drmPrimeHandleToFD(gbm_device_get_fd(egl->gbm->device), handle, DRM_CLOEXEC, &fd);
   if (ret < 0) {
      vrend_printf("failed to export plane handle %d\n", errno);
      return NULL;
   }

   const uint32_t format = bytes_per_pixel == 1 ? GBM_FORMAT_R8 : GBM_FORMAT_GR88;
   const uint32_t stride = gbm_bo_get_stride_for_plane(bo, plane);
   const uint32_t offset = gbm_bo_get_offset(bo, plane);
   image = virgl_egl_image_from_dmabuf(egl,
                                       virgl_gbm_get_plane_width(bo, plane),
                                       virgl_gbm_get_plane_height(bo, plane),
                                       format,
                                       gbm_bo_get_modifier(bo),
                                       1,
                                       &fd,
                                       &stride,
                                       &offset);
   close(fd);

   return image;
}
#endif /* ENABLE_MINIGBM_ALLOCATION */

bool virgl_egl_supports_fences(struct virgl_egl *egl)
{
   return (egl && has_bit(egl->extension_bits, EGL_KHR_FENCE_SYNC_ANDROID));
}

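/* Create a native Android fence sync object on the current display, or
 * return EGL_NO_SYNC_KHR when EGL_ANDROID_native_fence_sync is missing. */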
EGLSyncKHR virgl_egl_fence_create(struct virgl_egl *egl)
{
   if (!egl || !has_bit(egl->extension_bits, EGL_KHR_FENCE_SYNC_ANDROID)) {
      return EGL_NO_SYNC_KHR;
   }

   return eglCreateSyncKHR(egl->egl_display, EGL_SYNC_NATIVE_FENCE_ANDROID, NULL);
}

void virgl_egl_fence_destroy(struct virgl_egl *egl, EGLSyncKHR fence)
{
   eglDestroySyncKHR(egl->egl_display, fence);
}

bool virgl_egl_client_wait_fence(struct virgl_egl *egl, EGLSyncKHR fence, uint64_t timeout)
{
   EGLint ret = eglClientWaitSyncKHR(egl->egl_display, fence, 0, timeout);
   if (ret == EGL_FALSE) {
      vrend_printf("wait sync failed\n");
   }
   return ret != EGL_TIMEOUT_EXPIRED_KHR;
}

bool virgl_egl_export_signaled_fence(struct virgl_egl *egl, int *out_fd)
{
   return virgl_egl_export_fence(egl, egl->signaled_fence, out_fd);
}

bool virgl_egl_export_fence(struct virgl_egl *egl, EGLSyncKHR fence, int *out_fd)
{
   *out_fd = eglDupNativeFenceFDANDROID(egl->egl_display, fence);
   return *out_fd != EGL_NO_NATIVE_FENCE_FD_ANDROID;
}

bool virgl_egl_different_gpu(struct virgl_egl *egl)
{
   return egl->different_gpu;
}